def build(*args, **kwargs):
    """Build an authorized Google API client via ``discovery.build``.

    Each setting is resolved in priority order: explicit keyword argument,
    then the IPython profile config section ``Googleapihelper``, then the
    module-level default.  The OAuth scope has no default: if it cannot be
    resolved, an error is logged and None is returned.  Remaining kwargs
    (minus oauth2client flags) are forwarded to ``discovery.build`` along
    with an authorized ``http`` object.

    Returns:
        The discovery service object, or None when no scope is configured
        or credentials cannot be obtained.
    """
    ip = get_ipython()

    def _resolve(kwarg_name, config_name, default, info_prefix):
        # Pop an option from kwargs if present, else read the IPython
        # config, else fall back to the module default (logging the fact).
        if kwarg_name in kwargs:
            value = kwargs[kwarg_name]
            del kwargs[kwarg_name]
            return value
        if 'Googleapihelper' in ip.config and config_name in ip.config.Googleapihelper:
            return ip.config.Googleapihelper[config_name]
        logger.info(info_prefix + default)
        return default

    credential_path = _resolve(
        'google_credential_path', 'credential_path', DEFAULT_CREDENTIALS_PATH,
        'Credential store path not specified, trying default: ')
    secrets_path = _resolve(
        'google_secrets_path', 'secrets_path', DEFAULT_SECRETS_PATH,
        'Secrets path not specified, trying default: ')
    redirect_uri = _resolve(
        'redirect_uri', 'redirect_uri', DEFAULT_REDIRECT_URI,
        'Redirect uri not specified, trying default: ')

    # Scope has no usable default: bail out when it cannot be resolved.
    if 'scope' in kwargs:
        scope = kwargs['scope']
        del kwargs['scope']
    elif 'Googleapihelper' in ip.config and 'scope' in ip.config.Googleapihelper:
        scope = ip.config.Googleapihelper['scope']
    else:
        logger.error('No scope specified in options or ipython config. '
                     + 'Store scope in ipython profile or specify on command line.')
        return None

    storage = Storage(credential_path)
    credentials = storage.get()
    if credentials is None:
        # No cached credentials: run the interactive OAuth flow, which
        # writes the result back into storage.
        flow = flow_from_clientsecrets(secrets_path, scope=scope,
                                       redirect_uri=redirect_uri)
        flags, kwargs = extract_flags_from_kwargs(kwargs)
        tools.run_flow(flow, storage, flags)
        credentials = storage.get()
        if credentials is None:
            logger.error("Unable to retrieve google oauth credentials")
            return None

    http = httplib2.Http()
    http = credentials.authorize(http)
    kwargs['http'] = http
    return discovery.build(*args, **kwargs)
def googlePOST(temperature, humidity):
    """Insert a temperature/humidity reading into a Google Fusion Table.

    Authenticates via OAuth2 (cached in ``sec_storage.enc`` next to the
    client secrets), then issues an INSERT with the node id and current
    timestamp.

    Args:
        temperature: Reading in the table's temperature column (float).
        humidity: Reading in the table's humidity column (float).
    """
    flow = client.flow_from_clientsecrets(
        filepath + "/" + config.get("Config", "client_secrets_file"),
        scope="https://www.googleapis.com/auth/fusiontables",
    )
    http = httplib2.Http()
    storage = Storage(filepath + "/" + "sec_storage.enc")
    parser = argparse.ArgumentParser(parents=[tools.argparser])
    flags = parser.parse_args()
    # Fix: read the credential store once instead of twice (the original
    # called storage.get() both in the test and in the assignment).
    credentials = storage.get()
    if not credentials:
        credentials = tools.run_flow(flow, storage, flags)
        storage.put(credentials)
    http_auth = credentials.authorize(http)
    service = build("fusiontables", "v2",
                    developerKey=config.get("Config", "google_api_key"),
                    http=http_auth)
    query = service.query()
    # Values are numeric (int/float) except the timestamp, so the
    # formatted SQL cannot carry injected text from the readings.
    sqlstring = "INSERT INTO {:s} (nodeid, temperature, humidity, date) VALUES ({:d}, {:f}, {:f}, '{:s}');".format(
        config.get("Config", "table_id"),
        int(config.get("Config", "node_id")),
        temperature,
        humidity,
        datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
    )
    query.sql(sql=sqlstring).execute()
def getCredentials(secrets_file, tokens_file, scopes, flags):
    """Return OAuth2 credentials, running the auth flow when needed.

    Valid tokens cached in *tokens_file* are returned directly; otherwise
    the interactive flow built from *secrets_file* is run and the freshly
    stored credentials are re-read and returned.
    """
    # Flow object is only exercised when no valid tokens are on disk.
    auth_flow = flow_from_clientsecrets(
        secrets_file, scope=scopes, message="Visit the APIs Console")
    storage = Storage(tokens_file)
    credentials = storage.get()
    if credentials is not None and not credentials.invalid:
        return credentials
    # Authenticate interactively; the flow writes results into storage.
    run_flow(auth_flow, storage, flags, http=Http())
    return storage.get()
def upload_to_google_drive(file_to_be_uploaded, filename, params, flags):
    # NOTE(review): the triple-quoted string below is dead example code
    # (the flow_from_clientsecrets + run_flow variant) kept as a docstring;
    # the live implementation uses a manual OAuth2WebServerFlow console flow.
    """ CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json')
    FLOW = client.flow_from_clientsecrets(CLIENT_SECRETS, scope=[
        'https://www.googleapis.com/auth/drive',
        'https://www.googleapis.com/auth/drive.appdata',
        'https://www.googleapis.com/auth/drive.apps.readonly',
        'https://www.googleapis.com/auth/drive.file',
        'https://www.googleapis.com/auth/drive.metadata.readonly',
        'https://www.googleapis.com/auth/drive.readonly',
        'https://www.googleapis.com/auth/drive.scripts',
    ], message=tools.message_if_missing(CLIENT_SECRETS))
    storage = Storage('credentials.dat')
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = tools.run_flow(FLOW, storage, flags)
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = discovery.build('drive', 'v2', http=http)
    """
    storage = Storage('credentials.dat')
    if not storage.get():
        # No cached credentials: run the manual console OAuth flow
        # (Python 2: print statement and raw_input).
        flow = OAuth2WebServerFlow(params.params['client_id'],
                                   params.params['client_secret'],
                                   params.params['oauth_scope'],
                                   params.params['redirect_url'])
        authorize_url = flow.step1_get_authorize_url()
        print 'Go to the following link in your browser: ' + authorize_url
        code = raw_input('Enter verification code: ').strip()
        credentials = flow.step2_exchange(code)
        storage.put(credentials)
    else:
        credentials = storage.get()
    http = httplib2.Http()
    http = credentials.authorize(http)
    drive_service = build('drive', 'v2', http=http)
    print 'Auth success!'  # Debug purposes
    # Upload the file as a resumable zip attachment named *filename*.
    media_body = MediaFileUpload(file_to_be_uploaded,
                                 mimetype='application/zip',
                                 resumable=True)
    body = {
        'title': filename,
        'description': 'Backup ' + params.params['current_date_string'] + ' ' + params.params['current_time_string'],
        'mimeType': 'application/zip'
    }
    file = drive_service.files().insert(body=body, media_body=media_body).execute()
def store_tokens(access_token, refresh_token):
    """Persist Box API tokens next to TOKENS_FILE via oauth2client Storage.

    NOTE(review): ``store.put()`` is handed a plain ``(access, refresh)``
    tuple, but oauth2client's Storage normally expects a Credentials
    object -- confirm this works with the Storage implementation in use.
    """
    from oauth2client.file import Storage
    import os
    py_dir = os.path.dirname(TOKENS_FILE)  # os.path.realpath(__file__))
    # Side effect: changes the process working directory.
    os.chdir(py_dir)
    # storage_file = os.path.join(os.path.dirname(py_dir), 'calendar.dat')
    storage_file = os.path.join(py_dir, 'boxapi' + '.dat')
    store = Storage(storage_file)
    credentials = store.get()
    if credentials is None or credentials.invalid == True:
        # Nothing valid cached: store the incoming token pair and re-read.
        store.put((access_token, refresh_token,))
        credentials = store.get()
    else:
        # Presumably the stored object unpacks to (access, refresh); the
        # rebound parameters are local and not returned to the caller.
        access_token, refresh_token = credentials
    return credentials
def get_credentials():
    """Gets valid user credentials from storage.

    If nothing has been stored, or if the stored credentials are invalid,
    the OAuth2 flow is completed to obtain the new credentials.

    Returns:
        Credentials, the obtained credential.
    """
    credential_dir = os.path.join(os.path.expanduser('~'), '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir, 'pill_cal.json')

    store = Storage(credential_path)
    credentials = store.get()
    if credentials and not credentials.invalid:
        return credentials

    # Cached credentials missing or stale: run the OAuth2 flow.
    flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
    flow.user_agent = APPLICATION_NAME
    if flags:
        credentials = tools.run_flow(flow, store, flags)
    else:  # Needed only for compatibility with Python 2.6
        credentials = tools.run(flow, store)
    print('Storing credentials to ' + credential_path)
    return credentials
def revJSON(file_ID, file_Name):
    """Dump up to 299 revisions of a Drive file as JSON lines (Python 2).

    Each existing revision is appended to ``Challenges/<file_Name>.json``,
    one JSON document per line; missing revision ids are reported and
    skipped.
    """
    print "pulling credentials"
    myCreds = Storage('my_credentials.json')
    credentials = myCreds.get()
    print "authorizing HTTP request"
    http = httplib2.Http()
    http = credentials.authorize(http)
    print "building API resource"
    drive_service = build('drive', 'v2', http=http)
    data_file = time.strftime("Challenges/" + file_Name + ".json")
    try:
        directory = os.path.dirname(data_file)
        os.makedirs(directory)
    except:
        # NOTE(review): bare except also hides real I/O errors, not only
        # the "directory already exists" case it assumes.
        print "directory already exists"
    # Revision ids are probed sequentially; the API raising means the
    # revision does not exist (or any other error -- bare except again).
    for revNum in xrange(1, 300):
        print "calling API"
        stringRev = str(revNum)
        try:
            revisions = drive_service.revisions().get(fileId=file_ID, revisionId=stringRev).execute()
            rev = open(data_file, "a+")
            jRevs = json.dump(revisions, rev)
            rev.write("\n")
            rev.close()
            print "Saved Revision %d" % revNum
        except:
            print "Revision %d doesn't exist" % revNum
def __enter__(self):
    """Context-manager entry: authorize against YouTube and build the service.

    Creates a temporary file to hold OAuth credentials, runs the upload
    flow when no valid credentials exist, and stores the resulting API
    client on ``self.service``.

    Raises:
        Exception: if this object already has an active session
            (``temp_path``/``service`` non-None marks re-entry).

    Returns:
        self, with ``self.service`` ready for use.
    """
    # Ensure that we have not re-entered.
    # Fix: identity comparison ("is not None") instead of "!= None".
    if self.temp_path is not None or self.service is not None:
        raise Exception('Cannot use multiple nested with blocks on same Youtube object!')
    flow = flow_from_clientsecrets(
        self.client_secrets_path,
        scope=YOUTUBE_UPLOAD_SCOPE,
        message=MISSING_CLIENT_SECRETS_MESSAGE)
    # Credentials are cached in a throwaway file whose path is kept so it
    # can be cleaned up later.
    temp_file = NamedTemporaryFile(delete=False)
    self.temp_path = temp_file.name
    temp_file.close()
    storage = Storage(self.temp_path)
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        # Empty argv: don't let oauth2client parse this process's options.
        credentials = run_flow(flow, storage, argparser.parse_args(list()))
    self.service = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                         http=credentials.authorize(httplib2.Http()))
    return self
def _connect(self, scope="https://www.googleapis.com/auth/drive",
             redirect_url="urn:ietf:wg:oauth:2.0:oob",
             saved_credentials="drive_cred.txt"):
    """
    Start connection for Google drive (Python 2: print statement and
    raw_input drive the manual console OAuth flow).

    :param scope: Service to access
    :param redirect_url: URI handled by application
    :param saved_credentials: File storing authorization credentials to
        avoid re-authenticating every time
    """
    flow = OAuth2WebServerFlow(
        self._key, self._secret, scope, redirect_url)
    storage = Storage(saved_credentials)
    credentials = storage.get()
    if credentials is None:
        # Manual flow: user authorizes in a browser and pastes the code.
        auth_url = flow.step1_get_authorize_url()
        print '1. Go to: ' + auth_url
        print '2. Click "Allow" (you might have to log in first)'
        print '3. Copy the authorization code.'
        code = raw_input("Enter the authorization code here: ").strip()
        credentials = flow.step2_exchange(code)
    else:
        print 'Previous authorization will be used!'
    http = httplib2.Http()
    http = credentials.authorize(http)
    print "Authorization successful!"
    self._client = build("drive", "v2", http=http)
    # Cache the (possibly freshly exchanged) credentials for next time.
    storage.put(credentials)
class Gmail:
    """Thin wrapper over the Gmail API: connect plus basic message operations."""

    def __init__(self, flags, client_secret_file='client_secret.json',
                 oauth_scope='https://www.googleapis.com/auth/gmail.send',
                 storage_file='gmail.storage'):
        self.__client_secret_file = client_secret_file
        self.__oauth_scope = oauth_scope
        self.__storage = Storage(storage_file)
        self.__connect(flags)

    def __connect(self, flags):
        """Authorize (running the OAuth flow if needed) and build the service."""
        flow = flow_from_clientsecrets(self.__client_secret_file,
                                       scope=self.__oauth_scope)
        http = httplib2.Http()
        credentials = self.__storage.get()
        if credentials is None or credentials.invalid:
            # Interactive flow; result is cached in self.__storage.
            credentials = run_flow(flow, self.__storage, flags, http=http)
        http = credentials.authorize(http)
        self.gmail_service = discovery.build('gmail', 'v1', http=http)

    def getMessagesList(self, userId='me', query=None):
        """List message metadata matching *query* for *userId*."""
        request = self.gmail_service.users().messages().list(userId=userId, q=query)
        return request.execute()

    def getMessageDetails(self, msgId, userId='me'):
        """Fetch one message and wrap it in a GmailMessage."""
        raw = self.gmail_service.users().messages().get(userId=userId, id=msgId).execute()
        return GmailMessage(raw)

    def markAsRead(self, msgId, userId='me'):
        """Remove the UNREAD label from the given message."""
        body = {'removeLabelIds': ['UNREAD'], 'addLabelIds': []}
        self.gmail_service.users().messages().modify(
            userId=userId, id=msgId, body=body).execute()
def main(argv):
    """Parse CLI arguments, authorize against GCE, and dispatch the command."""
    # Orchestration object shared by all subcommands.
    gce_cluster = GceCondor(None, PROJECT_ID)

    parser = argparse.ArgumentParser(parents=[argparser])
    subparsers = parser.add_subparsers()

    # "start": bring up a condor cluster.
    start_cmd = subparsers.add_parser('start', help="start a condor cluster")
    start_cmd.add_argument('node_count', type=int, default=1,
                           help="the number of nodes, including master")
    start_cmd.add_argument('-i', '--image', action="store_true",
                           help="create instance from predefined image")
    start_cmd.set_defaults(func=gce_cluster.start)

    # "terminate": tear down every instance in the project.
    terminate_cmd = subparsers.add_parser(
        'terminate',
        help="shutdown cluster, ie terminate all instances in project")
    terminate_cmd.set_defaults(func=gce_cluster.terminate)

    args = parser.parse_args(argv)

    # Perform OAuth 2.0 authorization, reusing stored credentials if valid.
    flow = flow_from_clientsecrets(CLIENT_SECRETS, scope=GCE_SCOPE)
    storage = Storage(OAUTH2_STORAGE)
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = run_flow(flow, storage, args)
    auth_http = credentials.authorize(httplib2.Http())

    # Hand the authorized Http object to the orchestrator, then run.
    gce_cluster.set_auth_http(auth_http)
    args.func(args)
def OAuth2Login(client_secrets, credential_store, email):
    """Authenticate to Picasa Web Albums and return a PhotosService client.

    Cached credentials from *credential_store* are reused when valid;
    otherwise the manual OOB flow runs.  Tokens expiring within five
    minutes are refreshed before the (re)stored credentials are used.
    """
    scope = 'https://picasaweb.google.com/data/'
    user_agent = 'picasawebuploader'

    storage = Storage(credential_store)
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        # Manual OOB flow: user pastes the code shown in the browser.
        flow = flow_from_clientsecrets(client_secrets, scope=scope,
                                       redirect_uri='urn:ietf:wg:oauth:2.0:oob')
        webbrowser.open(flow.step1_get_authorize_url())
        code = raw_input('Enter the authentication code: ').strip()
        credentials = flow.step2_exchange(code)

    if (credentials.token_expiry - datetime.utcnow()) < timedelta(minutes=5):
        # Token about to lapse: refresh it over an authorized channel.
        auth_http = credentials.authorize(httplib2.Http())
        credentials.refresh(auth_http)
    storage.put(credentials)

    return gdata.photos.service.PhotosService(
        source=user_agent,
        email=email,
        additional_headers={'Authorization': 'Bearer %s' % credentials.access_token})
def authenService():
    """Authorize against YouTube (read/write scope) and return the service."""
    CLIENT_SECRETS_FILE = "client_secrets.json"
    # Shown when client_secrets.json cannot be found next to this module.
    MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0

To make this sample run you will need to populate the client_secrets.json file
found at:

   %s

with information from the Developers Console
https://console.developers.google.com/

For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__), CLIENT_SECRETS_FILE))

    YOUTUBE_READ_WRITE_SCOPE = "https://www.googleapis.com/auth/youtube"
    YOUTUBE_API_SERVICE_NAME = "youtube"
    YOUTUBE_API_VERSION = "v3"

    flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
                                   message=MISSING_CLIENT_SECRETS_MESSAGE,
                                   scope=YOUTUBE_READ_WRITE_SCOPE)
    # Token cache is keyed on the script name.
    storage = Storage("%s-oauth2.json" % sys.argv[0])
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = run_flow(flow, storage, argparser.parse_args())
    return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                 http=credentials.authorize(httplib2.Http()))
class BigQuery(object):
    """Small helper around the BigQuery v2 API for synchronous queries."""

    def __init__(self):
        self.flow = flow_from_clientsecrets(
            file_path('client_secrets.json'),
            scope='https://www.googleapis.com/auth/bigquery')
        self.storage = Storage(file_path('bigquery_credentials.dat'))
        self.credentials = self.storage.get()
        if self.credentials is None or self.credentials.invalid:
            self.authorize()
        self.http = self.credentials.authorize(httplib2.Http())
        self.service = build('bigquery', 'v2', http=self.http)

    def authorize(self):
        """Run the interactive OAuth flow and cache the result on self."""
        self.credentials = tools.run_flow(
            self.flow, self.storage, tools.argparser.parse_args())

    def query(self, query):
        """Run *query* synchronously; return rows as lists of validated values.

        Each cell is converted through the VALIDATOR registered for its
        schema column name.
        """
        response = self.service.jobs().query(
            projectId=PROJECT_NUMBER, body={'query': query}).execute()
        data_points = []
        for row in response.get('rows', []):
            data_points.append([
                VALIDATOR[response['schema']['fields'][i]['name']](cell['v'])
                for i, cell in enumerate(row['f'])
            ])
        return data_points
def test_pickle_and_json_interop(self):
    """Storage must refuse pickled credentials and rewrite them as JSON.

    Python 2 test: uses the `file` builtin and text-mode pickling.
    """
    # Write a file with a pickled OAuth2Credentials.
    access_token = 'foo'
    client_id = 'some_client_id'
    client_secret = 'cOuDdkfjxxnv+'
    refresh_token = '1/0/a.df219fjls0'
    token_expiry = datetime.datetime.utcnow()
    token_uri = 'https://www.google.com/accounts/o8/oauth2/token'
    user_agent = 'refresh_checker/1.0'
    credentials = OAuth2Credentials(
        access_token, client_id, client_secret,
        refresh_token, token_expiry, token_uri,
        user_agent)
    f = open(FILENAME, 'w')
    pickle.dump(credentials, f)
    f.close()
    # Storage should be not be able to read that object, as the capability to
    # read and write credentials as pickled objects has been removed.
    s = Storage(FILENAME)
    read_credentials = s.get()
    self.assertEquals(None, read_credentials)
    # Now write it back out and confirm it has been rewritten as JSON
    s.put(credentials)
    f = file(FILENAME)
    data = simplejson.load(f)
    f.close()
    self.assertEquals(data['access_token'], 'foo')
    self.assertEquals(data['_class'], 'OAuth2Credentials')
    self.assertEquals(data['_module'], OAuth2Credentials.__module__)
def _get_service(self):
    """Create and return the Google Calendar service object, using the
    credentials file generated through the command::

        python manage.py --authorize

    The service object is used to make API requests to Google Calendar,
    but will raise IOError if the credentials file is not generated.

    :raises: IOError, NotImplementedError
    :returns: The Google Calendar service.
    """
    credentials_path = e.eventum.app.config[
        'EVENTUM_INSTALLED_APP_CREDENTIALS_PATH']
    credentials = Storage(credentials_path).get()
    # Missing credentials file -> IOError; stale credentials -> we have no
    # non-interactive refresh path, hence NotImplementedError.
    if credentials is None:
        raise IOError
    if credentials.invalid is True:
        raise NotImplementedError
    authorized_http = credentials.authorize(httplib2.Http())
    return build('calendar', 'v3', http=authorized_http)
def get_credentials(calendar_name):
    """Gets valid user credentials from storage for *calendar_name*.

    If nothing has been stored, or if the stored credentials are invalid,
    the OAuth2 flow is completed to obtain the new credentials.  File
    names come from ``settings.CALENDAR_CREDENTIALS[calendar_name]`` and
    live next to this module.

    Returns:
        Credentials, the obtained credential.
    """
    calendar_cfg = settings.CALENDAR_CREDENTIALS[calendar_name]
    credentials_file = calendar_cfg["credentials_file"]
    client_secret_file = calendar_cfg["client_secret_file"]
    project_dir = os.path.dirname(os.path.realpath(__file__))
    credential_dir = project_dir
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir, credentials_file)

    store = Storage(credential_path)
    credentials = store.get()
    if not credentials or credentials.invalid:
        client_secret_file_path = os.path.join(project_dir, client_secret_file)
        flow = client.flow_from_clientsecrets(client_secret_file_path, SCOPES)
        flow.user_agent = APPLICATION_NAME
        # Parse an empty argv so oauth2client does not consume our options.
        flags = tools.argparser.parse_args(args=[])
        credentials = tools.run_flow(flow, store, flags)
        # Fix: corrected "acccess" typo in the log message.
        logger.info('[google_calendar_backend] Storing GCal access credentials to '
                    + credential_path)
    return credentials
def get_email_service(config):
    """Return an authorized Gmail API service instance."""
    secret_path = config.get_google_client_secret_path()
    print("google_client_secret_path: {0}".format(secret_path))

    # The "gmail.compose" scope allows: "Create, read, update, and delete
    # drafts. Send messages and drafts."  See
    # https://developers.google.com/gmail/api/auth/scopes for all scopes.
    OAUTH_SCOPE = 'https://www.googleapis.com/auth/gmail.compose'

    # Start the OAuth flow to retrieve credentials.
    flow = flow_from_clientsecrets(secret_path, scope=OAUTH_SCOPE)
    http = httplib2.Http()

    # Default flags only -- nothing taken from the real command line.
    parser = argparse.ArgumentParser(parents=[tools.argparser])
    flags = parser.parse_args([])
    print("debug: flags: {0!r}".format(flags))

    # Retrieve credentials from storage, or run the flow to generate them.
    storage = Storage(config.get_gmail_storage_path())
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = tools.run_flow(flow, storage, flags=flags, http=http)

    # Authorize our Http object and build the Gmail service from discovery.
    http = credentials.authorize(http)
    return build('gmail', 'v1', http=http)
def __init__(self, config_fields):
    """
    Initialize the Google Drive handler using configuration dictionary fields.

    Args:
        config_fields (dict): String dictionary from the configuration segment

    Configuration Fields:
        access_token (str): Repository access token

    OAuth Interactive Configuration Fields:
        client_id (str): Client Id to use for OAuth validation
        client_secret (str): Client secret to use for OAuth validation
        cred_file (str): Full filepath to store credentials used for access.
    """
    flow = OAuth2WebServerFlow(config_fields["client_id"],
                               config_fields["client_secret"],
                               self.SCOPES)
    try:
        storage = Storage(config_fields["cred_file"])
    except Exception:
        # Fix: narrowed the previous bare "except:" (which also swallowed
        # KeyboardInterrupt/SystemExit).  Fall back to a credentials file
        # next to this module when "cred_file" is missing or unusable.
        storage = Storage(os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "gdc.dat"))
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = tools.run_flow(flow, storage,
                                     flags=googledriveHandler.oauth_flags())
    # Create an httplib2.Http object to handle our HTTP requests, and
    # authorize it using the credentials.authorize() function.
    http = httplib2.Http()
    http = credentials.authorize(http)
    self.client = discovery.build('drive', 'v3', http=http)
def oauth2_login(self):
    """Authenticate to Picasa Web and return a PhotosService client.

    Reuses credentials cached in ``self.credential_storage + self.user +
    ".dat"`` when valid, otherwise fetches new ones via
    ``self.get_credentials()``.  Tokens expiring within five minutes are
    refreshed first.  (Python 2: print statement.)

    :return: gdata PhotosService authorized with a Bearer token.
    """
    user_agent = "picasawebuploader"
    storage = Storage(self.credential_storage + self.user + ".dat")
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        print "Failed to get credential in cache. Going to get new"
        credentials = self.get_credentials()
    if (credentials.token_expiry - datetime.utcnow()) < timedelta(minutes=5):
        # Token about to lapse: refresh it over an authorized channel.
        http = httplib2.Http()
        http = credentials.authorize(http)
        credentials.refresh(http)
    storage.put(credentials)
    gd_client = gdata.photos.service.PhotosService(
        source=user_agent,
        email=self.user + "@gmail.com",
        additional_headers={"Authorization": "Bearer %s" % credentials.access_token},
    )
    return gd_client
def main():
    """Load the EGFR FM-data CSV from GCS into a BigQuery table."""
    secrets_path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                'client_secrets.json')
    flow = flow_from_clientsecrets(
        secrets_path, scope='https://www.googleapis.com/auth/bigquery')
    storage = Storage('bigquery_credentials.dat')
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        # Run oauth2 flow with default arguments.
        credentials = tools.run_flow(flow, storage,
                                     tools.argparser.parse_args([]))
    http = credentials.authorize(httplib2.Http())

    # todo: make a dataset and table?
    service = build('bigquery', 'v2', http=http)

    projectId = '907668440978'
    datasetId = 'isb_cgc'
    targetTableId = 'fmdata_egfr'
    sourceCSV = ['gs://fmdata/allDataMerge.EGFR.blankNA.csv']
    loadTable(service, projectId, datasetId, targetTableId, sourceCSV,
              schema_fm)
def authorized_http(client_id, client_secret, apps, file=None):
    """
    Start an authorized HTTP session.

    Valid stored user credentials covering all requested app scopes are
    reused; otherwise the OAuth2 flow is completed to obtain new ones.
    """
    if not os.path.exists(CREDENTIAL_DIR):
        os.makedirs(CREDENTIAL_DIR)
    storage = Storage(file or CREDENTIAL_PATH)
    credentials = storage.get()

    # One read-only scope per requested app.
    wanted_scopes = {
        "https://www.googleapis.com/auth/{0}.readonly".format(app)
        for app in apps
    }

    needs_auth = (not credentials or credentials.invalid
                  or not wanted_scopes <= credentials.scopes)
    if needs_auth:
        flow = OAuth2WebServerFlow(client_id=client_id,
                                   client_secret=client_secret,
                                   scope=wanted_scopes,
                                   redirect_uri=REDIRECT_URI)
        flow.user_agent = USER_AGENT
        # Do not let the OAuth client parse our command-line options.
        flags = tools.argparser.parse_args(args=[])
        credentials = tools.run_flow(flow, storage, flags)

    return credentials.authorize(httplib2.Http())
def __init__(self, job_description, saga_url, pilot_compute_description):
    """Store job/pilot descriptions, apply overrides, and OAuth-authenticate.

    Defaults for image, machine type and zone can be overridden via the
    ``vm_id``, ``vm_type`` and ``vm_location`` keys of the pilot compute
    description.  OAuth credentials are cached in ``gce.dat``.

    Fixes: ``dict.has_key()`` (removed in Python 3) replaced with the
    ``in`` operator; ``invalid == True`` replaced with plain truthiness.
    """
    self.job_description = job_description
    self.saga_url = saga_url
    self.pilot_compute_description = pilot_compute_description

    self.image_url = GCE_IMAGE_URL
    if "vm_id" in self.pilot_compute_description:
        self.image_url = self.pilot_compute_description["vm_id"]

    self.machine_type = "https://www.googleapis.com/compute/v1beta12/projects/bigjob-pilot/machine-types/n1-standard-1"
    if "vm_type" in self.pilot_compute_description:
        self.machine_type = self.pilot_compute_description["vm_type"]

    self.location = "https://www.googleapis.com/compute/v1beta12/projects/bigjob-pilot/zones/us-east1-a"
    if "vm_location" in self.pilot_compute_description:
        self.location = self.pilot_compute_description["vm_location"]

    self.id = "bigjob-" + str(uuid.uuid1())
    self.network_ip = None

    # Do OAUTH authentication
    storage = Storage('gce.dat')
    self.credentials = storage.get()
    if self.credentials is None or self.credentials.invalid:
        flow = OAuth2WebServerFlow(
            client_id=OAUTH2_CLIENT_ID,
            client_secret=OAUTH2_CLIENT_SECRET,
            scope='https://www.googleapis.com/auth/compute',
            user_agent='bigjob-client/1.0')
        self.credentials = run(flow, storage)
def setup(self, client_secrets):
    """Authorize against the Genomics API and return a service object."""
    storage = Storage("credentials.dat")
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        flow = flow_from_clientsecrets(
            client_secrets,
            scope="https://www.googleapis.com/auth/genomics",
            message="You need to copy a client_secrets.json file into this directory, "
            "or pass in the --client_secrets_filename option to specify where "
            "one exists. See the README for more help.",
        )
        # There's probably a better way to generate the 'flags' object. Doing it this way for now.
        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            parents=[tools.argparser])
        parser.add_argument(
            "--client_secrets_filename",
            default=client_secrets,
            help="The filename of a client_secrets.json file from a "
            'Google "Client ID for native application" that '
            "has the Genomics API enabled.",
        )
        credentials = run_flow(flow, storage, parser.parse_args())
    # Create a genomics API service over an authorized HTTP channel.
    authorized_http = credentials.authorize(httplib2.Http())
    return build("genomics", "v1beta2", http=authorized_http)
def get_authenticated_service(oauth_file):
    """Return an authorized YouTube API client using tokens from *oauth_file*.

    NOTE(review): ``args`` below is the namedtuple *class* itself, with
    attributes set on the class object -- a stand-in for an argparse
    namespace.  run_flow only reads attribute values, so this works, but
    it is fragile: any attribute run_flow expects beyond these two would
    raise AttributeError.  Confirm before changing.
    """
    args = namedtuple('flags', [
        'noauth_local_webserver',
        'logging_level'
    ])
    args.noauth_local_webserver = True
    args.logging_level = 'ERROR'
    # how and where tokens are stored
    # http://google-api-python-client.googlecode.com/hg/docs/epy/oauth2client.multistore_file-module.html
    storage = Storage(oauth_file)
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        flow = flow_from_clientsecrets(
            CLIENT_SECRETS_FILE,
            scope=YOUTUBE_READ_WRITE_SCOPE,)
        # do the "allow access" step, save token.
        credentials = run_flow(flow, storage, args)
    return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                 http=credentials.authorize(httplib2.Http()))
def gdrive_login(self): """ Logins to the gdrive using the your credentials - no input needed""" # Authenticate and get a service object attempt_1=0 while attempt_1 < 4: try: # Send in client secret and client ID to the authetication server. Need to set this up in google developer console to get the client secrets and ID # Then need to also activate google analytics in the allowed applications flow = OAuth2WebServerFlow( self.client_id, self.client_secret, 'https://www.googleapis.com/auth/drive') # Stores the credentials in credentials.dat (i think) storage = Storage('credentials_gdrive.dat') credentials = storage.get() if credentials is None or credentials.invalid: credentials = run(flow, storage) # Use the credentials to get authentication? # Finally if this is the first time, your browser should pop to ask for login and permission allowing app http = httplib2.Http() http = credentials.authorize(http) self.service_gd = build('drive', 'v2', http=http) attempt_1=100 except Exception as e_connection: attempt_1+=1 self.logger.info('Exception is: '+str(e_connection)) self.logger.info('Attempt number '+str(attempt_1)) time.sleep(7) pass print ('Exception is: '+str(e_connection)+'\n'+'Attempt number '+str(attempt_1))
def Authenticate(self):
    """Load API keys, obtain OAuth2 credentials, and build the Tasks service."""
    f = Auth(KEYS_FILE)

    # OAuth 2.0 Authentication
    flow = OAuth2WebServerFlow(
        client_id=f.GetClientId(),
        client_secret=f.GetClientSecret(),
        scope='https://www.googleapis.com/auth/tasks',
        user_agent='Tasky/v1')

    # If credentials don't exist or are invalid, run through the native
    # client flow.  The Storage object ensures that, on success, the good
    # Credentials are written back to the file.
    storage = Storage(os.path.join(TASKY_DIR, 'tasks.dat'))
    credentials = storage.get()
    if not credentials or credentials.invalid:
        credentials = run(flow, storage)

    authorized_http = credentials.authorize(httplib2.Http())

    # The main Tasks API object.
    self.service = build(serviceName='tasks', version='v1',
                         http=authorized_http, developerKey=f.GetApiKey())
def __build_service(self):
    """
    Creates the Calendar service object based on the credentials stored
    in config.py, running the OAuth flow when nothing valid is cached.

    Returns:
        The authorized Google Calendar v3 service.
    """
    flow = OAuth2WebServerFlow(
        client_id=config.client_id,
        client_secret=config.client_secret,
        scope='https://www.googleapis.com/auth/calendar',
        user_agent=config.user_agent)

    storage = Storage(self.credentials_filename)
    credentials = storage.get()
    # Fix: test .invalid by truthiness instead of "== True".
    if credentials is None or credentials.invalid:
        credentials = run(flow, storage)

    http = httplib2.Http()
    http = credentials.authorize(http)

    service = build(
        serviceName="calendar",
        version="v3",
        http=http,
        developerKey=config.developer_key)
    return service
def GetCredentials(flags, scope_list):
    """Retrieve saved credentials or create and save credentials using flow.

    Args:
        flags: argparse parsed flags object.
        scope_list: List of strings reflecting desired API access (scope)
            e.g.: ['https://www.googleapis.com/auth/directory.user'].

    Returns:
        An oauth2client Credentials() object.
    """
    storage_path = FILE_MANAGER.BuildFullPathToFileName(
        _CURRENT_ACCESS_FILE_NAME)
    client_file_storage = Storage(storage_path)
    credentials = client_file_storage.get()
    if credentials is None or credentials.invalid:
        # No usable cached credentials: run the full OAuth flow.
        client_secrets_path = FILE_MANAGER.BuildFullPathToFileName(
            _CLIENT_SECRETS_FILE_NAME, work_dir=False)
        flow_manager = flow_from_clientsecrets(
            client_secrets_path,
            scope=scope_list,
            message=_MISSING_CLIENT_SECRETS_MSG % client_secrets_path)
        credentials = run_flow(flow_manager, client_file_storage, flags)
    if not credentials:
        log_utils.LogError('Unable to retrieve valid credentials.')
        sys.exit(-1)
    return credentials
def main():
    """Run a sample BigQuery query over the public wikipedia dataset (Python 2)."""
    storage = Storage('bigquery_credentials.dat')
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        # Run oauth2 flow with default arguments.
        credentials = tools.run_flow(FLOW, storage, tools.argparser.parse_args([]))
    http = httplib2.Http()
    http = credentials.authorize(http)
    bigquery_service = build('bigquery', 'v2', http=http)
    try:
        query_request = bigquery_service.jobs()
        query_data = {'query': 'SELECT TOP( title, 10) as title, COUNT(*) as revision_count FROM [publicdata:samples.wikipedia] WHERE wp_namespace = 0;'}
        query_response = query_request.query(projectId=PROJECT_NUMBER, body=query_data).execute()
        print 'Query Results:'
        # Each row's cells ('f') are printed tab-separated.
        for row in query_response['rows']:
            result_row = []
            for field in row['f']:
                result_row.append(field['v'])
            print ('\t').join(result_row)
    except HttpError as err:
        print 'Error:', pprint.pprint(err.content)
    except AccessTokenRefreshError:
        # NOTE(review): the adjacent string literals below concatenate
        # without a space -> "...re-runthe application..."; left as-is
        # since a doc pass must not change runtime text.
        print ("Credentials have been revoked or expired, please re-run"
               "the application to re-authorize")
def create_event(request):
    """Create a Google Calendar event from POSTed form data, then redirect.

    Reads event fields from request.POST, authorizes with the calendar
    credentials stored under the requesting user's username, inserts the
    event into the primary calendar of the user's school, and redirects
    to /school.
    """
    branch = request.POST.get("branch")
    startTime = request.POST.get("startTime")
    endTime = request.POST.get("endTime")
    startDate = request.POST.get("startDate")
    endDate = request.POST.get("endDate")
    summary = request.POST.get("summary")
    print(startDate, endDate, startTime, endTime)

    # Stored OAuth credentials are keyed on the username.
    storage = Storage(request.user.username)
    credentials = storage.get()
    print(credentials)
    http = credentials.authorize(httplib2.Http())
    print(request.user.username)

    school = School.objects.get(principal=request.user)
    print(school)
    print(school.location)
    location = school.location

    service = build('calendar', 'v3', http=http)
    event_body = {
        'summary': summary,
        'location': location,
        'description': branch,
        'start': {
            'dateTime': "{0}T{1}:00".format(startDate, startTime),
            'timeZone': '(GMT+05.30)',
        },
        'end': {
            'dateTime': "{0}T{1}:00".format(endDate, endTime),
            'timeZone': '(GMT+05.30)',
        },
        'recurrence': ['RRULE:FREQ=DAILY;COUNT=1'],
        'attendees': [
            {'email': '*****@*****.**'},
            {'email': '*****@*****.**'},
        ],
        'reminders': {
            'useDefault': False,
            'overrides': [
                {'method': 'email', 'minutes': 24 * 60},
                {'method': 'popup', 'minutes': 10},
            ],
        },
    }
    service.events().insert(calendarId='primary', body=event_body).execute()
    return redirect("/school")
class GoogleOAuth2(object):
    ''' class to manage oauth flow easily and store credentials.
    refresh token is used if needed

    Python 2 code (uses raw_input and urllib.unquote). Credentials are
    persisted through an oauth2client Storage when a store path is given.
    '''
    # depends on the token you need (online, offline, native, mobile...)
    # see https://developers.google.com/accounts/docs/OAuth2
    REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
    ACCESS_TYPE = 'offline'
    # range of the authorisation on google services
    DEFAULT_SCOPE = 'https://www.googleapis.com/auth/drive'

    def __init__(self, client_id, client_secret, scope=None, store=None):
        # scope falls back to the Drive scope when not supplied
        if not scope:
            scope = self.DEFAULT_SCOPE
        self.scope = scope
        self.client_id = client_id
        self.client_secret = client_secret
        self._credentials = None
        self.flow = None
        self.storage = None
        # only persist credentials when a storage path was provided
        if store:
            self.storage = Storage(store)

    def set_token(self, *args, **kwargs):
        ''' called by the oauth process when user submitted final token

        args[0] is the verification code pasted by the user; returns the
        exchanged Credentials, or None if the exchange failed.
        '''
        code = args[0]
        try:
            credentials = self.flow.step2_exchange(code)
        except FlowExchangeError:
            # bad/expired code: swallow and signal failure with None
            credentials = None
        if credentials and self.storage:
            self.storage.put(credentials)
        return credentials

    def ask_code(self, authorize_url, callback):
        ''' default oauth process : use console only

        Prints the authorize URL and reads the verification code from stdin,
        then hands it to *callback* (normally set_token).
        '''
        print("open %s in your browser and paste the code below" % authorize_url)
        code = raw_input('Enter verification code: ').strip()
        return callback(code)

    def get_credentials(self, force=False, callback=None):
        ''' start the oauth process. refresh token if expired and refreshable.

        :param force: when True, always restart the web-server flow
        :param callback: UI hook receiving (authorize_url, set_token);
            defaults to the console-based ask_code
        :return: Credentials, or whatever *callback* returns when a new
            authorization round-trip is needed
        '''
        if not callback:
            callback = self.ask_code
        credentials = None
        if self.storage:
            credentials = self.storage.get()
        if credentials and credentials.access_token_expired and credentials.refresh_token:
            # refresh existing expired token
            http = httplib2.Http()
            http = credentials.authorize(http)
            credentials.refresh(http)
        if force or not credentials:
            self.flow = OAuth2WebServerFlow(self.client_id,
                                            self.client_secret,
                                            self.scope,
                                            self.REDIRECT_URI,
                                            access_type=self.ACCESS_TYPE)
            authorize_url = urllib.unquote(self.flow.step1_get_authorize_url())
            return callback(authorize_url, self.set_token)
        return credentials

    def get_token(self, force=False, callback=None):
        # Convenience accessor: raw access token string, or None.
        creds = self.get_credentials(force=force, callback=callback)
        if creds:
            return creds.token_response.get('access_token')
        return None
def get_credentials():
    """Using the fake user name as a key, retrieve the credentials.

    Returns the oauth2client Credentials stored under
    <script dir>/gcal/credentials-<client_id>.dat, or None if absent.
    Relies on the module-level `client_id`.
    """
    # Build the path with os.path.join instead of a hard-coded '\\'
    # separator: the original string concatenation only worked on Windows
    # (on POSIX it produced a filename containing literal backslashes).
    credential_file = os.path.join(
        os.path.dirname(__file__), 'gcal', 'credentials-%s.dat' % client_id)
    storage = Storage(credential_file)
    return storage.get()
import os
import httplib2
from oauth2client import tools
from oauth2client import client
from oauth2client.file import Storage

# File where oauth2client caches the user's OAuth token between runs.
credentials_path = "credentials.json"
store = Storage(credentials_path)
credentials = store.get()
if credentials is None or credentials.invalid:
    # No usable cached token: run the interactive flow against the
    # client secrets in client.json and persist the result.
    f = "client.json"
    scope = "https://www.googleapis.com/auth/youtube.readonly"
    flow = client.flow_from_clientsecrets(f, scope)
    flow.user_agent = "YouTube Live Comment"
    credentials = tools.run_flow(flow, Storage(credentials_path))
def createEventt(self):
    """Interactively create a Google Calendar event (Mycroft skill method).

    Walks the user through title/description/dates, checks meeting-room and
    attendee availability via the freebusy API, then inserts the event.

    Fixes vs. the previous revision:
    - ``notif = True,`` produced a 1-tuple, not a boolean, so
      ``sendNotifications`` received ``(True,)``; now a plain ``True``.
    - ``nb``/``n`` were only assigned inside the invite branch, raising
      NameError when the user invited nobody; both are now initialised to 0
      and the cancel check only fires when invitations were attempted.
    """
    storage1 = Storage('info3.dat')
    credentials = storage1.get()
    if credentials is None or credentials.invalid:
        credentials = tools.run_flow(FLOW, storage1)
    print(credentials)
    # Create an httplib2.Http object to handle our HTTP requests and
    # authorize it with our good Credentials.
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = build('calendar', 'v3', http=http)
    people_service = build(serviceName='people', version='v1', http=http)
    print("authorized")
    # To get a list of people in the user's contacts,
    results = people_service.people().connections().list(
        resourceName='people/me',
        pageSize=100,
        personFields='names,emailAddresses',
        fields='connections,totalItems,nextSyncToken').execute()
    connections = results.get('connections', [])
    # get informations about the event
    tittle = self.get_response("what is the name of the event")
    description = self.get_response("can you describe more the event")
    strtdate = self.get_response("when the event starts")
    st = extract_datetime(strtdate)
    enddate = self.get_response("when the event ends")
    et = extract_datetime(enddate)
    st = st[0] - self.utc_offset
    et = et[0] - self.utc_offset
    datestart = st.strftime('%Y-%m-%dT%H:%M:00')
    datend = et.strftime('%Y-%m-%dT%H:%M:00')
    datend += UTC_TZ
    datestart += UTC_TZ
    # contact emails and names collected into nameListe and adsmails
    nameListe = []
    adsmails = []
    # attendee is the list of invitees who are available
    attendee = []
    for person in connections:
        emails = person.get('emailAddresses', [])
        names = person.get('names', [])
        adsmails.append(emails[0].get('value'))
        nameListe.append(names[0].get('displayName'))
    # emails of every focus/meeting room calendar
    freemails = []
    freerooms = []
    nameroom = [
        "Midoune Meeting Room", "Aiguilles Meeting Room",
        "Barrouta Meeting Room", "Kantaoui Meeting Room",
        "Gorges Meeting Room", "Ichkeul Meeting Room",
        "Khemir Meeting Room", "Tamaghza Meeting Room",
        "Friguia Meeting Room", "Ksour Meeting Room",
        "Medeina Meeting Room", "Thyna Meeting Room"
    ]
    emailroom = [
        "*****@*****.**", "*****@*****.**", "*****@*****.**",
        "*****@*****.**", "*****@*****.**", "*****@*****.**",
        "*****@*****.**", "*****@*****.**", "*****@*****.**",
        "*****@*****.**", "*****@*****.**", "*****@*****.**"
    ]
    # query freebusy for each room over the event window
    for i in range(0, len(emailroom)):
        body = {
            "timeMin": datestart,
            "timeMax": datend,
            "timeZone": 'America/Los_Angeles',
            "items": [{"id": emailroom[i]}]
        }
        roomResult = service.freebusy().query(body=body).execute()
        room_dict = roomResult[u'calendars']
        for cal_room in room_dict:
            print(cal_room, ':', room_dict[cal_room])
            case = room_dict[cal_room]
            for j in case:
                if j == 'busy' and case[j] == []:
                    # empty busy list -> the room is free; remember it
                    freerooms.append(nameroom[i])
                    freemails.append(emailroom[i])
    suggroom = freerooms[0]
    suggmail = freemails[0]
    reservation = self.get_response(
        'do you need to make a reservation for a meeting room? Yes or No?')
    if reservation == 'yes':
        self.speak_dialog("suggestionroom", data={"suggroom": suggroom})
        x = self.get_response(
            "Do you agree making a reservation for this meeting room")
        if x == "yes":
            room = suggroom
            attendee.append(suggmail)
        else:
            s = ",".join(freerooms)
            self.speak_dialog("freerooms", data={"s": s})
            room = self.get_response(
                'which Room do you want to make a reservation for??')
            for i in range(0, len(freerooms)):
                if freerooms[i] == room:
                    # add the chosen room's email to the attendee list
                    attendee.append(freemails[i])
    else:
        room = ''
    # adding attendees: verify each one is a known contact and is free
    confirm = self.get_response("Do you want to invite someone? yes or no?")
    # n = number of invitations attempted, nb = number of free invitees;
    # initialised here so the checks below work when nobody is invited.
    n = 0
    nb = 0
    if confirm == 'yes':
        n_attendee = self.get_response(
            " how many persons would you like to invite")
        n = extract_number(n_attendee)
        nb = n
        print(n)
        j = 0
        while j < n:
            exist = False
            x = self.get_response("who do you want to invite")
            for l in range(0, len(nameListe)):
                if x == nameListe[l]:
                    self.speak_dialog("exist")
                    exist = True
                    mail = adsmails[l]
                    # check each invitee's availability via freebusy
                    body = {
                        "timeMin": datestart,
                        "timeMax": datend,
                        "timeZone": 'America/Los_Angeles',
                        "items": [{"id": mail}]
                    }
                    eventsResult = service.freebusy().query(
                        body=body).execute()
                    cal_dict = eventsResult[u'calendars']
                    print(cal_dict)
                    for cal_name in cal_dict:
                        print(cal_name, ':', cal_dict[cal_name])
                        statut = cal_dict[cal_name]
                        for i in statut:
                            if i == 'busy' and statut[i] == []:
                                self.speak_dialog("free", data={"att": x})
                                # invitee is free: add to attendee list
                                attendee.append(mail)
                            elif i == 'busy' and statut[i] != []:
                                self.speak_dialog("busy", data={"att": x})
                                nb -= 1
            if not exist:
                self.speak_dialog("notexist")
            j += 1
    attendeess = []
    for i in range(len(attendee)):
        email = {'email': attendee[i]}
        attendeess.append(email)
    notification = self.get_response(
        'would you like to send notification to attendees?')
    if notification == 'yes':
        notif = True  # was `True,` (a tuple) — now a real boolean
    else:
        notif = False
    # event creation payload
    event = {
        'summary': tittle,
        'location': room,
        'description': description,
        'start': {
            'dateTime': datestart,
            'timeZone': 'America/Los_Angeles',
        },
        'end': {
            'dateTime': datend,
            'timeZone': 'America/Los_Angeles',
        },
        'recurrence': ['RRULE:FREQ=DAILY;COUNT=1'],
        'attendees': attendeess,
        'reminders': {
            'useDefault': False,
            'overrides': [
                {'method': 'email', 'minutes': 24 * 60},
                {'method': 'popup', 'minutes': 10},
            ],
        },
    }
    if n != 0 and nb == 0:
        # invitations were attempted but every invitee is busy
        self.speak_dialog("cancellEvent")
    elif nb == n:
        event = service.events().insert(calendarId='primary',
                                        sendNotifications=notif,
                                        body=event).execute()
        print('Event created: %s' % (event.get('htmlLink')))
        self.speak_dialog("eventCreated")
    else:
        res = self.get_response(
            'Some of the attendees are busy would you like to continue creating the event yes or no?'
        )
        if res == 'yes':
            event = service.events().insert(calendarId='primary',
                                            sendNotifications=notif,
                                            body=event).execute()
            print('Event created: %s' % (event.get('htmlLink')))
            self.speak_dialog("eventCreated")
        elif res == 'no':
            self.speak_dialog("eventCancelled")
def get_outh_credentials(client_secret_file, credential_dir=None, outh_nonlocal=False):
    """Gets valid user credentials from storage.

    If nothing has been stored, or if the stored credentials are invalid,
    the OAuth2 flow is completed to obtain the new credentials.

    :param client_secret_file: path to outh2 client secret file
    :param credential_dir: path to directory where tokens should be stored
                           'global' if you want to store in system-wide location
                           None if you want to store in current script directory
    :param outh_nonlocal: if the authorization should be done in another computer,
                          this will provide a url which when run will ask for credentials

    :return Credentials, the obtained credential.
    """
    lflags = flags  # module-level flags (may be None on old oauth2client)
    if credential_dir == 'global':
        home_dir = os.path.expanduser('~')
        credential_dir = os.path.join(home_dir, '.credentials')
        if not os.path.exists(credential_dir):
            os.makedirs(credential_dir)
    elif not credential_dir:
        credential_dir = os.getcwd()
    else:
        pass

    # verify credentials directory
    if not os.path.isdir(credential_dir):
        raise IOError(2, "Credential directory does not exist.", credential_dir)
    credential_path = os.path.join(credential_dir, 'sheets.googleapis.com-python.json')

    # check if refresh token file is passed (the client secret file itself may
    # already be a stored-credentials file; Storage raises KeyError otherwise)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        try:
            store = Storage(client_secret_file)
            credentials = store.get()
        except KeyError:
            credentials = None

    # else try to get credentials from storage
    if not credentials or credentials.invalid:
        try:
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                store = Storage(credential_path)
                credentials = store.get()
        except KeyError:
            credentials = None

    # else get the credentials from flow
    if not credentials or credentials.invalid:
        # verify client secret file
        if not os.path.isfile(client_secret_file):
            raise IOError(2, "Client secret file does not exist.",
                          client_secret_file)
        # execute flow
        flow = client.flow_from_clientsecrets(client_secret_file, SCOPES)
        flow.user_agent = 'pygsheets'
        if lflags:
            lflags.noauth_local_webserver = outh_nonlocal
            credentials = tools.run_flow(flow, store, lflags)
        else:  # Needed only for compatibility with Python 2.6
            credentials = tools.run(flow, store)
        print('Storing credentials to ' + credential_path)
    return credentials
def main(argv):
    """Fetch Google Calendar events for the configured calendars and write
    them (sorted) to the configured output file, or print in test mode.

    Reads module-level `config` (JSON-derived dict) for client secrets,
    token file, scope, calendar definitions and output path. Mutates the
    module globals listed below.
    """
    global altuser
    global myuser
    global users
    global service
    global config
    users = ('my_user_name', 'another_user_name')
    myuser = getlogin()
    altuser = ''
    # altuser is simply "the other" of the two known users
    for x in users:
        if myuser != x:
            altuser = x
            break
    # 2017-02-25: Set up data for OAuth Flow object (client secrets):
    script_real_path = dirname(realpath(__file__))
    gdata_constants = config['gdata_constants']
    CLIENT_SECRETS = join(script_real_path, gdata_constants['secrets'])
    OAUTH2_STORAGE = join(script_real_path, gdata_constants['datfile'])
    GCAL_SCOPE = gdata_constants['gcal_scope']
    # Calendar feeds defined in the config JSON file
    all_cals = config['calendars']
    scope = 'https://www.google.com/calendar/feeds/'
    outfile = join(script_real_path, config['outfile'])
    # 2017-02-25: Moved gflags here
    FLAGS = gflags.FLAGS
    LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
    gflags.DEFINE_enum('logging_level', 'INFO', LOG_LEVELS,
                       'Set the level of logging detail.')
    gflags.DEFINE_boolean('noauth_local_webserver', False,
                          'Disable the local server feature.')
    gflags.DEFINE_list('auth_host_port', [8080, 8090],
                       'Set the auth host port.')
    gflags.DEFINE_string('auth_host_name', 'localhost',
                         'Set the auth host name.')
    # To disable the local server feature, uncomment the following line:
    # FLAGS.auth_local_webserver = False
    # FLAGS.noauth_local_webserver = True
    # 2017-02-25: Parse arguments first before initiating OAuth2 Flow
    parser = argparse.ArgumentParser(
        description='Fetch Calendar data',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        parents=[tools.argparser])
    parser.add_argument('-d', '--days', action='store', dest='days', type=int,
                        help='number of days to fetch events')
    parser.add_argument('-c', '--cals', action='append', dest='cals',
                        help='calendars to fetch')
    parser.add_argument('-t', '--test', action='store', dest='test',
                        choices=('yes', 'no'), help='don\'t write to file')
    parser.add_argument('-u', '--user', action='store', dest='user',
                        help='switch current user')
    parser.add_argument('-r', '--transparents', action='store',
                        dest='transparents', choices=('only', 'no', 'both'),
                        help='select what to do with transparent events')
    parser.set_defaults(days=7, cals=[myuser], test='no', skip='yes')
    # Parse the command-line flags.
    flags = parser.parse_args(argv[1:])
    if flags.days < 1 or flags.days > 365:
        parser.error("-d, --days option must be between 1 and 365")
    if flags.user != None:
        # -u swaps the current/alternate user pair
        altuser = myuser
        myuser = flags.user
    delta = timedelta(flags.days)
    for x in flags.cals:
        if x not in [y['name'] for y in all_cals]:
            parser.error("-c, --cals option not defined: " + x)
    # Set up a Flow object to be used if we need to authenticate. This
    # sample uses OAuth 2.0, and we set up the OAuth2WebServerFlow with
    # the information it needs to authenticate. Note that it is called
    # the Web Server Flow, but it can also handle the flow for native
    # applications
    # The client_id and client_secret can be found in Google Developers Console
    # Perform OAuth 2.0 authorization.
    FLOW = flow_from_clientsecrets(CLIENT_SECRETS, scope=GCAL_SCOPE)
    # If the Credentials don't exist or are invalid, run through the native client
    # flow. The Storage object will ensure that if successful the good
    # Credentials will get written back to a file.
    storage = Storage(OAUTH2_STORAGE)
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = run_flow(FLOW, storage, FLAGS)
    #if credentials is None or credentials.invalid == True:
    #    credentials = run(FLOW, storage)
    # Create an httplib2.Http object to handle our HTTP requests and authorize it
    # with our good Credentials.
    http = httplib2.Http()
    http = credentials.authorize(http)
    # Build a service object for interacting with the API. Visit
    # the Google Developers Console
    # to get a developerKey for your own application.
    service = build(serviceName='calendar', version='v3', http=http)
    start_date = date.today()
    end_date = start_date + delta

    def in_option_cals(x):
        # predicate: was this calendar requested via -c / the default?
        return x['name'] in flags.cals

    def make_calfeed(special):
        # WorkCalFeed for "special" calendars, plain CalFeed otherwise
        retval = None
        if special:
            retval = WorkCalFeed()
        else:
            retval = CalFeed()
        return retval

    # Set up calendar feeds
    calfeeds = []
    for a_cal in filter(in_option_cals, all_cals):
        a_calfeed = make_calfeed(a_cal['special'])
        transparents = a_cal['transparents']
        # -r overrides the per-calendar transparents setting
        if flags.transparents != None:
            if flags.transparents.lower() == 'only':
                transparents = 'only'
            elif flags.transparents.lower() == 'no':
                transparents = 'no'
            else:
                transparents = 'both'
        a_calfeed.setup(a_cal['name'], a_cal['owner'], a_cal['userID'],
                        a_cal['visibility'], a_cal['projection'], transparents,
                        a_cal['hilitecolor'], start_date, end_date)
        calfeeds.append(a_calfeed)
    # Parse events
    events = []
    for a_calfeed in calfeeds:
        a_calfeed.fetch_events()
        # DEBUG: print a_calfeed.name + '\t' + str(a_calfeed.count()) + '\t' + str(a_calfeed)
        # DEBUG: events.append('-- ' + a_calfeed.name + ' --')
        events.extend(a_calfeed.list_events())
    # DEBUG: print len(events)
    if flags.test == 'no':
        # with open(outfile, 'w') as f:
        with codecs.open(outfile, encoding='utf-8', mode='w') as f:
            f.write('\n'.join(sorted(events)))
        # If using GeekTool, refresh the widgets. See https://www.tynsoe.org and http://flipmartin.net/software/applescript-tips-for-geektool-3
        # system("osascript -e 'tell application \"GeekTool Helper\" to refresh all'")
    else:
        # NOTE(review): `str` shadows the builtin here; harmless locally
        # but worth renaming.
        str = '\n'.join(sorted(events))
        if str:
            print(str)
def main(argv):
    """Demos the setting of the access properties by the Groups Settings API.

    Python 2 code. Parses command-line options, authorizes via oauth2client,
    and forwards the collected settings to access_settings().
    """
    usage = 'usage: %prog [options]'
    parser = OptionParser(usage=usage)
    parser.add_option('--groupId', help='Group email address')
    parser.add_option('--whoCanInvite',
                      help='Possible values: ALL_MANAGERS_CAN_INVITE, '
                      'ALL_MEMBERS_CAN_INVITE')
    parser.add_option('--whoCanJoin',
                      help='Possible values: ALL_IN_DOMAIN_CAN_JOIN, '
                      'ANYONE_CAN_JOIN, CAN_REQUEST_TO_JOIN, '
                      'CAN_REQUEST_TO_JOIN')
    parser.add_option('--whoCanPostMessage',
                      help='Possible values: ALL_IN_DOMAIN_CAN_POST, '
                      'ALL_MANAGERS_CAN_POST, ALL_MEMBERS_CAN_POST, '
                      'ANYONE_CAN_POST, NONE_CAN_POST')
    parser.add_option('--whoCanViewGroup',
                      help='Possible values: ALL_IN_DOMAIN_CAN_VIEW, '
                      'ALL_MANAGERS_CAN_VIEW, ALL_MEMBERS_CAN_VIEW, '
                      'ANYONE_CAN_VIEW')
    parser.add_option('--whoCanViewMembership',
                      help='Possible values: ALL_IN_DOMAIN_CAN_VIEW, '
                      'ALL_MANAGERS_CAN_VIEW, ALL_MEMBERS_CAN_VIEW, '
                      'ANYONE_CAN_VIEW')
    (options, args) = parser.parse_args()
    if options.groupId is None:
        print 'Give the groupId for the group'
        parser.print_help()
        return
    settings = {}
    # NOTE(review): `(a or b or ...) is None` is only True when ALL options
    # are falsy; it also tests whoCanPostMessage twice and never tests
    # whoCanViewGroup. Likely intended: all-of-them-are-None check.
    if (options.whoCanInvite or options.whoCanJoin or options.whoCanPostMessage
            or options.whoCanPostMessage or options.whoCanViewMembership) is None:
        print 'No access parameters given in input to update access permissions'
        parser.print_help()
    else:
        settings = {'whoCanInvite': options.whoCanInvite,
                    'whoCanJoin': options.whoCanJoin,
                    'whoCanPostMessage': options.whoCanPostMessage,
                    'whoCanViewGroup': options.whoCanViewGroup,
                    'whoCanViewMembership': options.whoCanViewMembership}
    # Set up a Flow object to be used if we need to authenticate.
    FLOW = flow_from_clientsecrets(CLIENT_SECRETS,
                                   scope='https://www.googleapis.com/auth/apps.groups.settings',
                                   message=MISSING_CLIENT_SECRETS_MESSAGE)
    storage = Storage('groupsettings.dat')
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        print 'invalid credentials'
        # Save the credentials in storage to be used in subsequent runs.
        credentials = run(FLOW, storage)
    # Create an httplib2.Http object to handle our HTTP requests and authorize it
    # with our good Credentials.
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = build('groupssettings', 'v1', http=http)
    access_settings(service=service, groupId=options.groupId, settings=settings)
# Path to the client_secret.json file downloaded from the Developer Console CLIENT_SECRET_FILE = 'credentials.json' # Check https://developers.google.com/gmail/api/auth/scopes for all available scopes OAUTH_SCOPE = 'https://www.googleapis.com/auth/gmail.compose' # Location of the credentials storage file STORAGE = Storage('gmail.storage') # Start the OAuth flow to retrieve credentials flow = flow_from_clientsecrets(CLIENT_SECRET_FILE, scope=OAUTH_SCOPE) http = httplib2.Http() # Try to retrieve credentials from storage or run the flow to generate them credentials = STORAGE.get() if credentials is None or credentials.invalid: credentials = tools.run_flow(flow, STORAGE, http=http) # Authorize the httplib2.Http object with our credentials http = credentials.authorize(http) # Build the Gmail service from discovery gmail_service = build('gmail', 'v1', http=http) for i in range(0, Excel_Sheet.nrows): name_of_candidate = Excel_Sheet.row_values(i)[0] print(name_of_candidate) text_msg = """\ Hey """ + name_of_candidate + """ ,
def authorize_token():
    """Load stored OAuth credentials, force a token refresh, and build the
    Google API client for the module-level API_NAME / API_VER."""
    creds = Storage(REFRESH_TOKEN_PATH).get()
    authorized_http = creds.authorize(httplib2.Http())
    creds.refresh(authorized_http)
    return build(API_NAME, API_VER, credentials=creds)
class YtModifier:
    """Creates YouTube playlists from a local JSON file, fills them via
    search, and can comment on / fetch statistics for videos.

    Drives a MenuInicial UI; most instance attributes are set by that menu
    before the action methods are called.
    """

    def __init__(self):
        # Load the playlist definitions from disk and echo a summary.
        with open("listas_de_reproduccion.json") as file:
            dict_aux = json.loads(file.read())
            self.dict_listas = dict_aux
        for nombre_lista, dict_1 in self.dict_listas["playlists"].items():
            cuantas_tracks = len(dict_1["tracks"])
            print(nombre_lista, cuantas_tracks)
        # OAuth: token cache is named after the script (sys.argv[0]).
        self.flow = flow_from_clientsecrets(
            CLIENT_SECRETS_FILE,
            message=MISSING_CLIENT_SECRETS_MESSAGE,
            scope=YOUTUBE_READ_WRITE_SSL_SCOPE)
        self.storage = Storage(os.path.abspath("%s-oauth2.json" % sys.argv[0]))
        self.credentials = self.storage.get()
        if self.credentials is None or self.credentials.invalid:
            self.flags = argparser.parse_args()
            self.credentials = run_flow(self.flow, self.storage, self.flags)
        self.youtube = build(YOUTUBE_API_SERVICE_NAME,
                             YOUTUBE_API_VERSION,
                             http=self.credentials.authorize(httplib2.Http()))
        # UI-populated state (selected playlist, search term, etc.).
        self.nombre_lista_elegida = ""
        self.id_lista_elegida = ""
        self.termino_busqueda = ""
        self.nombre_video_datos = ""
        self.video_comentario_elegido = ""
        self.channel_id = ""
        self.texto_comentario = ""
        self.lista_ids_stats = []
        self.lista_resultados_stats = []
        self.dict_ids = {}
        self.menu_inicial = MenuInicial(self.dict_listas, self)
        self.menu_inicial.show()

    def crear_lista_rep(self):
        # Create a private playlist for the currently selected list name.
        playlists_insert_response = self.youtube.playlists().insert(
            part="snippet,status",
            body=dict(snippet=dict(
                title=self.dict_listas["playlists"][
                    self.nombre_lista_elegida]["name"],
                description="A private playlist created with the YouTube API v3"
            ),
                status=dict(privacyStatus="private"))).execute()
        self.id_lista_elegida = playlists_insert_response["id"]
        # dict_ids maps list name -> {playlist id -> {video title -> video id}}
        self.dict_ids.update(
            {self.nombre_lista_elegida: {
                self.id_lista_elegida: {}
            }})
        self.channel_id = playlists_insert_response["snippet"]["channelId"]

    def youtube_search(self):
        # For each track, take the top search hit and record its video id.
        for track in self.dict_listas["playlists"][
                self.nombre_lista_elegida]["tracks"]:
            self.termino_busqueda = track["name"]
            try:
                search_response = self.youtube.search().list(
                    q=self.termino_busqueda,
                    part="id,snippet",
                    maxResults=1,
                    type="video").execute()
                for search_result in search_response.get("items", []):
                    if search_result["id"]["kind"] == "youtube#video":
                        self.dict_ids[self.nombre_lista_elegida][
                            self.id_lista_elegida].update({
                                search_result["snippet"]["title"]:
                                search_result["id"]["videoId"]
                            })
            except HttpError as e:
                print("An HTTP error %d occurred:\n%s" % (e.resp.status, e.content))

    def añadir_a_lista(self):
        # Insert every collected video id into the selected playlist.
        for nombre_video, id_video in self.dict_ids[self.nombre_lista_elegida][
                self.id_lista_elegida].items():
            playlistitems_insert_response = self.youtube.playlistItems(
            ).insert(part="snippet",
                     body=dict(snippet=dict(playlistId=self.id_lista_elegida,
                                            resourceId={
                                                "kind": "youtube#video",
                                                "videoId": id_video
                                            }))).execute()

    def insert_comment(self):
        # Post self.texto_comentario as a top-level comment on the chosen
        # video (video id stored at index 3 of the selection tuple).
        print(self.video_comentario_elegido)
        insert_result = self.youtube.commentThreads().insert(
            part="snippet",
            body=dict(snippet=dict(
                channelId=self.channel_id,
                videoId=self.video_comentario_elegido[3],
                topLevelComment=dict(snippet=dict(
                    textOriginal=self.texto_comentario))))).execute()

    def get_statistics(self):
        # Collect (title, id, likes, dislikes, comments, views) per video.
        dict_stat = {}
        for id in self.lista_ids_stats:
            stats_result = self.youtube.videos().list(
                part="snippet,statistics", id=id).execute()
            nombre = stats_result["items"][0]["snippet"]["title"]
            like = stats_result["items"][0]["statistics"].get("likeCount", 0)
            dislike = stats_result["items"][0]["statistics"].get(
                "dislikeCount", 0)
            comments = stats_result["items"][0]["statistics"].get(
                "commentCount", 0)
            total = stats_result["items"][0]["statistics"].get("viewCount", 0)
            self.lista_resultados_stats.append(
                (nombre, id, like, dislike, comments, total))
        print(self.lista_resultados_stats)
def get_run_ranges(url, sheet_number=0):
    """Return 2d list of the format [[start run, end run, label1, label2], ...]
    corresponding to the contents of the spreadsheet at the url.

    :param url: Google Sheets URL (opened via gspread with stored credentials)
    :param sheet_number: worksheet index within the document
    :raises ValueError: on an incorrectly formatted run-range cell

    Fixes vs. the previous revision: the run-range validation used
    ``map(int, split)``, which is lazy on Python 3 and therefore never
    raised — validation is now eager; the bare ``except:`` has been
    narrowed to ValueError; and the lazy ``map``/``filter`` results
    (later consumed by ``len``/``zip``) are materialized as lists so the
    function behaves identically on Python 2 and 3.
    """
    def to_ndarr(list_2d):
        return np.array([np.array(row) for row in list_2d])

    def merge_horizontal(arr1, arr2):
        # Row-wise concatenation of two equal-length 2d lists.
        return [r1 + r2 for r1, r2 in zip(arr1, arr2)]

    def fill_with_None(list_2d):
        # Right-pad every row with None to the longest row's length.
        maxlen = max(len(row) for row in list_2d)
        return [row + [None] * (maxlen - len(row)) for row in list_2d]

    def fill_to_match(arr1, arr2, target_num_labels=2):
        # Pad the shorter of the two lists with all-None rows.
        if len(arr1) > len(arr2):
            return arr1, arr2 + [[None] * target_num_labels
                                 ] * (len(arr1) - len(arr2))
        else:
            return arr1 + [target_num_labels * [None]
                           ] * (len(arr2) - len(arr1)), arr2

    def str_to_range(run_string):
        """ Converts string of format "abcd-efgh" to list [abcd, efgh]
        Raises ValueError on incorrectly-formatted range entries. """
        if not run_string:
            return [None, None]
        split = run_string.split('-')
        try:
            # Eager validation: every part must be convertible to int.
            [int(part) for part in split]
        except ValueError:
            raise ValueError("Invalid run range format: ", run_string)
        if len(split) == 1:
            return 2 * split
        elif len(split) == 2:
            return split
        raise ValueError("Invalid run range format: ", run_string)

    storage = Storage(utils.resource_path('data/credentials'))
    credentials = storage.get()
    gc = gspread.authorize(credentials)
    document = gc.open_by_url(url)
    worksheet = document.get_worksheet(sheet_number)
    cols = [worksheet.find(lab).col for lab in LABELS]
    run_col = worksheet.find(RUN_HEADER).col
    # Skip the header row ([1:]) of each column.
    runs = [str_to_range(v) for v in worksheet.col_values(run_col)[1:]]
    labels = fill_with_None([worksheet.col_values(col)[1:] for col in cols])
    # Transpose so each row holds the labels of one run range.
    labels = [list(t) for t in zip(*fill_with_None(labels))]
    runs, labels = fill_to_match(runs, labels)
    combined = merge_horizontal(runs, labels)
    # Keep only rows whose start run is present.
    return [row for row in combined if row[0]]
from googleapiclient import discovery
from tinydb import TinyDB

# Local TinyDB used to mirror GCE entities fetched from the Compute API.
db = TinyDB('entities.json')
group_table = db.table('Instance Groups')
template_table = db.table('Instance Templates')
credentials = GoogleCredentials.get_application_default()
from oauth2client.file import Storage
storage = Storage('creds.data')
service = discovery.build('compute', 'v1', credentials=storage.get())
# NOTE(review): bound to service.instances(), not service.instanceGroups() —
# looks like a copy/paste slip; confirm which collection is intended.
instanceGroups = service.instances()
instanceTemplates = service.instanceTemplates()
zones = service.zones()


def insert_templates():
    # Copy every instance template of the project into the local TinyDB,
    # following list_next pagination; a missing 'items' key ends the loop.
    # NOTE(review): .all() returns a list of TinyDB documents, which is then
    # passed as the `project` parameter — presumably a single project id
    # string is intended; verify against the projects.json schema.
    projectId = TinyDB('projects.json').table("Project").all()
    request = instanceTemplates.list(project=projectId)
    try:
        while request is not None:
            response = request.execute()
            for instanceTemplate in response['items']:
                template_table.insert(instanceTemplate)
            request = instanceTemplates.list_next(previous_request=request,
                                                  previous_response=response)
    except KeyError:
        pass


def insert_instance_groups():
    # NOTE(review): body appears truncated at this chunk boundary.
    projectId = TinyDB('projects.json').table("Project").all()
def main():
    """Interactive console tool: find a calendar event by name and update
    its summary, location (meeting room), attendees, or start/end time.

    Uses two auth stacks: oauth2client (info.dat) for the People API and
    google-auth/pickle (token.pickle) for the Calendar API.
    """
    # Getting the credentials for G.People API
    storage = Storage('info.dat')
    credentials = storage.get()
    if credentials is None or credentials.invalid is True:
        credentials = tools.run_flow(FLOW, storage)
    http = httplib2.Http()
    http = credentials.authorize(http)
    people_service = build(serviceName='people', version='v1', http=http)
    results = people_service.people().connections().list(
        resourceName='people/me',
        pageSize=10,
        personFields='emailAddresses,names').execute()
    connections = results.get('connections', [])
    # Getting the credentials for G.Calendar API
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server()
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    service = build('calendar', 'v3', credentials=creds)
    # Get inputs
    print("What's the name of the event?")
    n = input()
    l = "FOCUS-1ere-Midoune Meeting Room (10)"
    # NOTE(review): no-op string statement left over from the quickstart.
    """Shows basic usage of the Google Calendar API. Prints the start and name of the next 10 events on the user's calendar. """
    # Look up the single best match for the entered event name.
    events_result = service.events().list(calendarId='primary',
                                          maxResults=1,
                                          singleEvents=True,
                                          orderBy='startTime',
                                          q=n).execute()
    events = events_result.get('items', [])
    if not events:
        print('event not found.')
    for event in events:
        start = event['start'].get('dateTime', event['start'].get('date'))
        print(start, event['summary'])
        eventid = event['id']
        listeofattendees = event['attendees']
        #print(event['id'])
        #print(eventid)
        print('what do you want to update? Summery - Start Date/Time - End Date/Time - Location - Attendees - Meeting Room?')
        u = input()
        if u == 'Summery':
            print('what\'s the new summery of the event')
            ns = input()
            event['summary'] = ns
            updated_event = service.events().update(calendarId='primary',
                                                    eventId=eventid,
                                                    body=event).execute()
        elif u == 'Location':
            print('what\'s the new location of the event')
            r = input()
            # Map the room name to its resource-calendar email; Friguia is
            # the only 15-seat room.
            maxattendees = 10
            if r == "Midoun meeting room":
                room = "*****@*****.**"
            elif r == "Aiguilles Meeting Room":
                room = "*****@*****.**"
            elif r == "Barrouta Meeting Room":
                room = "*****@*****.**"
            elif r == "Kantaoui Meeting Room":
                room = "*****@*****.**"
            elif r == "Gorges Meeting Room":
                room = "*****@*****.**"
            elif r == "Ichkeul Meeting Room":
                room = "*****@*****.**"
            elif r == "Khemir Meeting Room":
                room = "*****@*****.**"
            elif r == "Tamaghza Meeting Room":
                room = "*****@*****.**"
            elif r == "Friguia Meeting Room":
                room = "*****@*****.**"
                maxattendees = 15
            elif r == "Ksour Meeting Room":
                room = "*****@*****.**"
            elif r == "Medeina Meeting Room":
                room = "*****@*****.**"
            elif r == "Thyna Meeting Room":
                room = "*****@*****.**"
            listofroomsadress = [
                '*****@*****.**', '*****@*****.**', '*****@*****.**',
                '*****@*****.**', '*****@*****.**', '*****@*****.**',
                '*****@*****.**', '*****@*****.**', '*****@*****.**',
                '*****@*****.**', '*****@*****.**', '*****@*****.**'
            ]
            listofroomsnames = [
                'Midoun meeting room', 'Aiguilles Meeting Room',
                'Barrouta Meeting Room', 'Kantaoui Meeting Room',
                'Gorges Meeting Room', 'Ichkeul Meeting Room',
                'Khemir Meeting Room', 'Tamaghza Meeting Room',
                'Friguia Meeting Room', 'Ksour Meeting Room',
                'Medeina Meeting Room', 'Thyna Meeting Room'
            ]
            o = 0
            p = 0
            t = 0
            y = 0
            attendemail = []
            # meetingroom = []
            attendname = []
            finallist = []
            #mr = {'email': room}
            # meetingroom.append(mr)
            # event['attendees'] = meetingroom
            attend = event['attendees']
            l = len(attend)
            # Collect the current attendees' emails (and names).
            while o != l:
                attendemail.append(attend[o]['email'])
                # NOTE(review): always reads attend[0] — probably meant attend[o].
                attendname.append(attend[0].get('displayName'))
                o = o + 1
            # Swap the old room's email for the newly chosen room's email.
            while p != len(attendemail):
                while t != len(listofroomsadress):
                    if attendemail[p] == listofroomsadress[t]:
                        attendemail[p] = room
                        # NOTE(review): `==` is a no-op comparison; an
                        # assignment (`=`) was almost certainly intended.
                        attendname[p] == r
                    t = t + 1
                p = p + 1
            while y != len(attendemail):
                mr = {'email': attendemail[y]}
                finallist.append(mr)
                y = y + 1
            event['attendees'] = finallist
            event['location'] = r
            updated_event = service.events().update(calendarId='primary',
                                                    eventId=eventid,
                                                    body=event).execute()
            #updated_event = service.events().insert(calendarId='primary', eventId=eventid, body=event).execute()
        # updating the attendees
        elif u == 'Attendees':
            # Getting the all ready invited attendees
            invitedattendees = event['attendees']
            invitedattendemail = []
            invitedattendname = []
            finallist = []
            o = 0
            l = len(invitedattendees)
            while o != l:
                invitedattendemail.append(invitedattendees[o]['email'])
                invitedattendname.append(invitedattendees[o].get('displayName'))
                o = o + 1
            # at this stage we have 3 lists
            # 1) invitedattend[] which is what we get from the google calendar
            # 2) invitedattendname[] the list of names of each attendee
            # 3) invitedattendemail[] the list of emails of each attendee
            # Now we have to figure out the number of attendees that we can
            # add, no more than the capacity of the room.
            maxattendees = 10
            if event['location'] == 'Friguia Meeting Room':
                maxattendees = 15
            print('how many attendees would like to add ?')
            at = int(input())
            na = maxattendees - at
            if na <= 0:
                print('you can\'t add attendees')
            elif na < at:
                print('you can only add ', na, ' attendees')
            else:
                na = at
            # Getting the Attendees from input
            attemail = []
            noms = []
            f = 0
            i = 1
            g = 0
            found = False
            found2 = False
            # get all contacts in a list
            for person in connections:
                emailAddresses = person.get('emailAddresses', [])
                names = person.get('names', [])
                attemail.append(emailAddresses[0].get('value'))
                noms.append(names[0].get('displayName'))
            print(noms)
            p = len(noms)
            # Init a list of attendees whose length is the maximum number of
            # attendees according to the room chosen before; unused slots
            # keep the 'blabla@blabla' placeholder.
            attendees = ['blabla@blabla'] * na
            # first attendee
            print('attendees :')
            a = input()
            # looking for the contact in contact list
            if a != '':
                while (g != p) & (found is False):
                    # if the name in the input matches a name in the list we
                    # add the email of that person to the attendees list,
                    # which will be treated later to delete the placeholders
                    if noms[g] == a:
                        attendees[0] = attemail[g]
                        g = g + 1
                        found = True
                    else:
                        g = g + 1
                if found is False:
                    print('contact not found try again please')
            else:
                print('no attendees added')
            # other attendees to add, fewer than max number of attendees
            while i != na:
                a = input()
                if a == '':
                    break
                else:
                    # NOTE(review): `|` (OR) keeps looping past p until
                    # found2 is True — can IndexError on noms[f]; `&` was
                    # probably intended (cf. the first-attendee loop).
                    while (f != p) | (found2 is False):
                        if noms[f] == a:
                            attendees[i] = attemail[f]
                            found2 = True
                        f = f + 1
                i = i + 1
            # until this stage we have a list of attendees + blanks filled
            # with the placeholder address
            # print(attendees)
            l = len(attendees)
            # print(l)
            # in this part we are going to get the attendees without the blanks
            t = 0
            att = []
            while t != l:
                if attendees[t] != 'blabla@blabla':
                    att.append(attendees[t])
                    t = t + 1
                else:
                    t = t + 1
            l2 = len(att)
            print(att)
            # print(l2)
            w = 0
            attendemail = []
            # NOTE(review): iterates `attendees` (with placeholders), not the
            # filtered `att` list built just above — confirm which is intended.
            while w != len(attendees):
                print(attendees[w])
                attendemail.append(attendees[w])
                w = w + 1
            # attendname.append(attendees[0].get('displayName'))
            attendee = []
            print(attendemail)
            # merge existing invitees with the newly added ones
            for s in range(len(invitedattendemail)):
                email = {'email': invitedattendemail[s]}
                attendee.append(email)
            print(attendee)
            for r in range(len(attendemail)):
                email = {'email': attendemail[r]}
                attendee.append(email)
            print(attendee)
            event['attendees'] = attendee
            updated_event = service.events().update(calendarId='primary',
                                                    eventId=eventid,
                                                    body=event).execute()
        elif u == 'Start Date/Time':
            start = event['start'].get('dateTime', event['start'].get('date'))
            print('Your event starts at ', start)
            print('what\'s the new start date of the event? yyyy-mm-ddT00:00:00')
            nsd = input()
            # NOTE(review): replaces the start dict with a bare string; the
            # API expects {'dateTime': ...} — this update will be rejected.
            event['start'] = nsd
            updated_event = service.events().update(calendarId='primary',
                                                    eventId=eventid,
                                                    body=event).execute()
        elif u == 'End':
            end = event['end'].get('dateTime', event['start'].get('date'))
            print('Your event ends at ', end)
            print('what\'s the new end date of the event? yyyy-mm-ddT00:00:00')
            ned = input()
            # NOTE(review): `ned` is a str; str has no .get — this line
            # raises AttributeError. Probably meant to use `ned` directly.
            nedt = ned.get('dateTime')
            event['end'] = nedt + '+01:00'
            updated_event = service.events().update(calendarId='primary',
                                                    eventId=eventid,
                                                    body=event).execute()
def main(argv):
    """Authorize against Google Drive and push a new revision of the
    dashboard spreadsheet to a fixed file id.

    Returns:
        The updated file resource dict, or None on an HTTP error.
    """
    parent_id = 0
    CLIENT_SECRET = 'client_secret_vsu.json'  # downloaded JSON file
    # Check https://developers.google.com/drive/scopes for all available scopes
    OAUTH_SCOPE = [
        'https://www.googleapis.com/auth/drive.file',
        'https://www.googleapis.com/auth/drive',
        'https://www.googleapis.com/auth/drive.appdata',
        'https://www.googleapis.com/auth/drive.apps.readonly'
    ]
    # Redirect URI for installed apps
    REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'

    storage = Storage('storage_vsu2.json')
    credentials = storage.get()
    if not credentials or credentials.invalid:
        flow = client.flow_from_clientsecrets(CLIENT_SECRET,
                                              ' '.join(OAUTH_SCOPE))
        credentials = tools.run(flow, storage)

    # Create an httplib2.Http object and authorize it with our credentials
    http = httplib2.Http()
    http = credentials.authorize(http)
    drive_service = build('drive', 'v2', http=http)

    filename = 'Dashboard Project Name List.xlsx'
    new_filename = 'Dashboard Project Name List.xlsx'
    mime_type = "application/vnd.ms-excel"
    new_mime_type = "application/vnd.ms-excel"
    title = 'Dashboard Project Name List'
    new_title = 'Dashboard Project Name List'
    new_revision = True
    # upload_type = 'media'

    # Update existing file:
    file_id = '1MlSYNzhskJHcZpTFiUuzAmoGXEKhKD_rFCYVyYhQuSI'
    try:
        # First retrieve the file from the API.
        file = drive_service.files().get(fileId=file_id).execute()
        # File's new metadata.
        file['title'] = title
        file['mimeType'] = mime_type
        # File's new content.
        media_body = MediaFileUpload(new_filename,
                                     mimetype=new_mime_type,
                                     resumable=True)
        # Send the request to the API.
        updated_file = drive_service.files().update(
            fileId=file_id,
            body=file,
            newRevision=new_revision,
            media_body=media_body).execute()
        return updated_file
    except errors.HttpError as error:
        # Fixed: `except errors.HttpError, error:` is Python-2-only syntax.
        print('An error occurred: %s' % error)
        return None
def getFile(file):
    """Load previously stored OAuth credentials from *file*.

    *file* is a path to an oauth2client Storage file; returns the stored
    credentials object, or None if nothing is stored there.
    """
    return Storage(file).get()
def _get_creds(self, oauth_file): if os.path.exists(oauth_file): storage = Storage(oauth_file) creds = storage.get() if creds is not None and not creds.invalid: return creds
def getCredentials():
    """Load OAuth credentials stored next to this script.

    Exits the process with status 1 if no credentials are cached
    (run the authorization step first).
    """
    credPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            CRED_FILENAME)
    storage = Storage(credPath)
    credentials = storage.get()
    if not credentials:
        # Python-2 print statements / raw_input modernized to Python 3.
        print("Get credentials before trying to upload (authorize.py)")
        sys.exit(1)
    return credentials


if __name__ == "__main__":
    storage = Storage(CRED_FILENAME)
    if not storage.get():
        print("Getting and saving credentials")
        # Run through the OAuth flow and retrieve authorization code
        flow = flow_from_clientsecrets(CLIENTSECRETS_LOCATION,
                                       ' '.join(SCOPES))
        flow.redirect_uri = REDIRECT_URI
        authorize_url = flow.step1_get_authorize_url()
        print('Go to the following link in your browser: ' + authorize_url)
        code = input('Enter verification code: ').strip()
        credentials = flow.step2_exchange(code)
        # Storing access token and a refresh token in CRED_FILENAME
        storage.put(credentials)
    else:
        print("Credentials already available")
class GoogleCalendar(): def __init__(self): oauth_keyfile_path = (os.path.dirname(os.path.abspath(__file__)) + '/google_oauth') FLAGS = gflags.FLAGS if os.path.exists(oauth_keyfile_path + '.db'): db = dbm.open(oauth_keyfile_path) client_id = db['client_id'] client_secret = db['client_secret'] developer_key = db['developer_key'] else: print 'create Google OAuth file...' db = dbm.open(oauth_keyfile_path, 'n') client_id = raw_input('input client_id :') client_secret = raw_input('input client_secret :') developer_key = raw_input('input developer key :') db['client_id'] = client_id db['client_secret'] = client_secret db['developer_key'] = developer_key db.close() # Set up a Flow object to be used if we need to authenticate. This # sample uses OAuth 2.0, and we set up the OAuth2WebServerFlow with # the information it needs to authenticate. Note that it is called # the Web Server Flow, but it can also handle the flow for native # applications # The client_id and client_secret are copied from the API Access tab on # the Google APIs Console self.FLOW = OAuth2WebServerFlow( client_id=client_id, client_secret=client_secret, scope='https://www.googleapis.com/auth/calendar', user_agent='twitter2calnedar/1.0') # To disable the local server feature, uncomment the following line: FLAGS.auth_local_webserver = False # If the Credentials don't exist or are invalid, # run through the native client flow. # The Storage object will ensure that if successful the good # Credentials will get written back to a file. self.storage = Storage('calendar.dat') self.credentials = self.storage.get() if self.credentials is None or self.credentials.invalid is True: self.credentials = run(self.FLOW, self.storage) # Create an httplib2.Http object to handle our HTTP requests # and authorize it with our good Credentials. http = httplib2.Http() http = self.credentials.authorize(http) # Build a service object for interacting with the API. 
Visit # the Google APIs Console # to get a developerKey for your own application. self.service = build(serviceName='calendar', version='v3', http=http, developerKey=developer_key) def connect(self): print 'OK' if self.service else 'BAD' dbfile_path = os.path.dirname(os.path.abspath(__file__)) + '/gc' if os.path.exists(dbfile_path + '.db'): db = dbm.open(dbfile_path) self.calendar_id = db['id'] print self.calendar_id cal = (self.service.calendars().get( calendarId=self.calendar_id).execute()) print cal['summary'] else: print 'process create' self.createCalendar() raw_input('continue...') def createCalendar(self): calendar = {'summary': 'Twitter2GoogleCalendar', 'timeZone': timezone} created_calendar = (self.service.calendars().insert( body=calendar).execute()) print created_calendar['id'], created_calendar['timeZone'] dbfile_path = os.path.dirname(os.path.abspath(__file__)) + '/gc' db = dbm.open(dbfile_path, 'n') db['id'] = created_calendar['id'] def createEvent(self, event_info): event = { 'summary': event_info['title'], 'location': event_info['location'], 'start': { 'dateTime': (event_info['start'] if 'start' in event_info else None), 'date': event_info['date'] if 'date' in event_info else None, 'timeZone': timezone }, 'end': { 'dateTime': event_info['end'] if 'end' in event_info else None, 'date': event_info['date'] if 'date' in event_info else None, 'timeZone': timezone }, } if ('reccurrence' in event_info and event_info['recurrence']['freq'] and event_info['recurrence']['count']): recurrence = ('RRULE:FREQ=' + event_info['recurrence']['freq'] + ';COUNT=' + event_info['recurrence']['count']) event['recurrence'] = [recurrence] try: created_event = (self.service.events().insert( calendarId=self.calendar_id, body=event).execute()) print created_event['id'] print event return created_event['id'] except Exception: print 'error: event insertion' self.credentials.refresh(httplib2.Http()) return False
def load_credentials(credential_file_path):
    """Fetch OAuth credentials persisted at *credential_file_path*.

    Returns the stored credentials object, or None when none exist.
    """
    store = Storage(credential_file_path)
    creds = store.get()
    return creds
def create_credentials(scope):
    """Generate and store a Drive credential file for *scope*.

    The credential is written into the ``.credentials`` folder of the
    home directory. *scope* must be one of the Google Drive OAuth scope
    URLs; an explanatory message is printed for unknown scopes or when
    the client-secret file is missing.

    Example:
        create_credentials("https://www.googleapis.com/auth/drive.metadata")
    """
    # All scopes this helper knows how to authorize.
    available_scopes = [
        'https://www.googleapis.com/auth/drive',
        'https://www.googleapis.com/auth/drive.readonly',
        'https://www.googleapis.com/auth/drive.appfolder',
        'https://www.googleapis.com/auth/drive.file',
        'https://www.googleapis.com/auth/drive.install',
        'https://www.googleapis.com/auth/drive.metadata',
        'https://www.googleapis.com/auth/drive.metadata.readonly',
        'https://www.googleapis.com/auth/drive.scripts'
    ]
    if scope in available_scopes:
        # Resolve ~/.credentials, creating the folder on first use.
        credential_dir = os.path.join(os.path.expanduser('~'), '.credentials')
        if not os.path.exists(credential_dir):
            os.makedirs(credential_dir)
        # The Drive client-secret file must already be in place.
        client_secret_path = os.path.join(
            credential_dir, 'client_secret_google_drive_api.json')
        if not os.path.exists(client_secret_path):
            result = """ The client_secret_google_drive_api.json file does not exist.
                Credentials cannot be created without this file.
                Please follow these instructions to create the client secret file:
                1 - Use the following link to create or select a project in the Google Developers Console and automatically turn on the API. Click Continue, then Go to credentials. Link: https://console.developers.google.com/start/api?id=drive
                2 - On the Add credentials to your project page, click the Cancel button.
                3 - At the top of the page, select the OAuth consent screen tab. Select an Email address, enter a Product name if not already set, and click the Save button.
                4 - Select the Credentials tab, click the Create credentials button and select OAuth client ID.
                5 - Select the application type Other, enter the name "Google Drive API Python", and click the Create button.
                6 - Click OK to dismiss the resulting dialog.
                7 - Click the file_download (Download JSON) button to the right of the client ID.
                8 - Move this file to the following location: """ + client_secret_path + """
                9 - Rename it to client_secret_google_drive_api.json
                """
        else:
            # One credential file per scope, named after the scope's tail.
            credential_name = ("google_drive_api_scope_" +
                               os.path.basename(scope).replace(".", "_") +
                               ".json")
            credential_path = os.path.join(credential_dir, credential_name)
            store = Storage(credential_path)
            credentials = store.get()
            if not credentials or credentials.invalid:
                flow = client.flow_from_clientsecrets(client_secret_path,
                                                      scope)
                flow.user_agent = 'Google Drive API Python'
                if flags:
                    credentials = tools.run_flow(flow, store, flags)
                else:
                    # Needed only for compatibility with Python 2.6
                    credentials = tools.run(flow, store)
            result = ('The credentials have been created and stored here: ' +
                      credential_path)
    else:
        # Unknown scope: report the valid options.
        result = """ This scope provided does not exist. Here is the list of all available scopes:
            - https://www.googleapis.com/auth/drive
            - https://www.googleapis.com/auth/drive.readonly
            - https://www.googleapis.com/auth/drive.appfolder
            - https://www.googleapis.com/auth/drive.file
            - https://www.googleapis.com/auth/drive.install
            - https://www.googleapis.com/auth/drive.metadata
            - https://www.googleapis.com/auth/drive.metadata.readonly
            - https://www.googleapis.com/auth/drive.scripts
            """
    print(result)
def thread_function():
    # Poll the Drive changes feed and forward folder / video-file
    # add-remove events to the library listeners.
    store = Storage(
        os.path.join(path_data, 'db', 'gdrive',
                     '%s.json' % self.gdrive_name))
    creds = store.get()
    if not creds or creds.invalid:
        # No cached credentials: do not run the OAuth flow here, bail out.
        return -1
    page_token = get_start_page_token(creds)
    while self.flag_thread_run:
        try:
            # Sleep in 1-second slices so a stop request is noticed quickly.
            for _ in range(self.change_check_interval):
                if self.flag_thread_run == False:
                    return
                time.sleep(1)
            results = self.gdrive_service.changes().list(
                pageToken=page_token,
                pageSize=1000,
                fields="changes( \
                    file( \
                    id, md5Checksum,mimeType,modifiedTime,name,parents,teamDriveId,trashed \
                    ), \
                    fileId,removed \
                    ), \
                    newStartPageToken").execute()
            page_token = results.get('newStartPageToken')
            logger.debug('PAGE_TOKEN:%s' % page_token)
            changes = results.get('changes', [])
            for change in changes:
                logger.debug('1.CHANGE : %s', change)
                # 2019-03-10: a modification must be sent as two jobs
                # (REMOVE of the cached entry, then ADD of the new one).
                is_add = True
                is_file = True
                if change['removed'] == True:
                    is_add = False
                    fileid = change['fileId']
                    if fileid in self.cache:
                        # Reconstruct metadata from the cache; removed
                        # entries carry no 'file' payload.
                        file_meta = {
                            'name': self.cache[fileid]['name'],
                            'parents': self.cache[fileid]['parents'],
                        }
                        file_meta['mimeType'] = self.cache[fileid][
                            'mimeType'] if 'mimeType' in self.cache[
                            fileid] else 'application/vnd.google-apps.folder'
                    else:
                        logger.debug('remove. not cache')
                        continue
                else:
                    if 'file' in change:
                        # Only folders and video files are interesting.
                        if change['file'][
                                'mimeType'] == 'application/vnd.google-apps.folder':
                            logger.debug('FOLDER')
                        elif change['file']['mimeType'].startswith('video'):
                            logger.debug('FILE')
                        else:
                            logger.debug('not folder, not video')
                            continue
                        fileid = change['file']['id']
                        file_meta = self.gdrive_service.files().get(
                            fileId=fileid,
                            fields=
                            "id,mimeType, modifiedTime,name,parents,trashed"
                        ).execute()
                        if file_meta[
                                'mimeType'] == 'application/vnd.google-apps.folder':
                            is_file = False
                logger.debug('IS_ADD : %s IS_FILE :%s', is_add, is_file)
                job_list = []
                if is_add and is_file:
                    job_list = [[file_meta, 'ADD', is_file]]
                elif is_add and not is_file:
                    job_list = [[file_meta, 'ADD', is_file]]
                    # Folder change: emit a REMOVE for the cached entry first.
                    if fileid in self.cache:
                        remove_file_meta = {
                            'name': self.cache[fileid]['name'],
                            'parents': self.cache[fileid]['parents'],
                        }
                        remove_file_meta['mimeType'] = self.cache[fileid][
                            'mimeType'] if 'mimeType' in self.cache else 'application/vnd.google-apps.folder'
                        ttmp = (remove_file_meta['mimeType'] !=
                                'application/vnd.google-apps.folder')
                        job_list.insert(
                            0, [remove_file_meta, 'REMOVE', ttmp])
                elif not is_add and is_file:
                    job_list = [[file_meta, 'REMOVE', is_file]]
                elif not is_add and not is_file:
                    job_list = [[file_meta, 'REMOVE', is_file]]
                for job in job_list:
                    file_meta = job[0]
                    type_add_remove = job[1]
                    is_file = job[2]
                    logger.debug('2.FILEMETA:%s %s %s' %
                                 (file_meta, type_add_remove, is_file))
                    file_paths = self.get_parent(file_meta)
                    if file_paths is None:
                        logger.debug('get_parent is None')
                        continue
                    gdrivepath = '/'.join(file_paths)
                    logger.debug('3.GdrivePath:%s' % gdrivepath)
                    mount_abspath = self.get_mount_abspath(file_paths)
                    if mount_abspath is None:
                        logger.debug('NOT MOUNT INFO')
                        continue
                    logger.debug('4.MountPath:%s' % mount_abspath)
                    s_id = self.get_section_id(mount_abspath)
                    if s_id == -1:
                        logger.debug(
                            '5-2.IGNORE. %s file section_id is -1.',
                            mount_abspath)
                    else:
                        # Keep the cache current so a later remove or
                        # change can resolve names and parents.
                        if is_add:
                            self.cache[fileid] = {
                                'name': file_meta['name'],
                                'parents': file_meta['parents'],
                                'mimeType': file_meta['mimeType']
                            }
                        else:
                            self.cache[fileid] = None
                        exist_in_library = self.is_exist_in_library(
                            mount_abspath)
                        # Only notify when the library state actually
                        # needs to change.
                        if (not exist_in_library
                                and type_add_remove == 'ADD') or (
                                    exist_in_library
                                    and type_add_remove == 'REMOVE'):
                            self.send_command(s_id, mount_abspath,
                                              type_add_remove, is_file)
                            logger.debug(
                                '5-1.Send Command %s %s %s %s',
                                s_id, mount_abspath, type_add_remove,
                                is_file)
                        else:
                            logger.debug(
                                '5-3.IGNORE. EXIST:%s TYPE:%s',
                                exist_in_library, type_add_remove)
                        # 2019-09-02: also notify video-station listeners.
                        try:
                            from .logic import Logic
                            Logic.send_to_listener(type_add_remove,
                                                   is_file, mount_abspath)
                        except Exception as exception:
                            logger.debug('Exception:%s', exception)
                            logger.debug(traceback.format_exc())
            logger.debug('6.File process end.. WAIT :%s',
                         self.api_call_inverval)
            for _ in range(self.api_call_inverval):
                if self.flag_thread_run == False:
                    return
                time.sleep(1)
            logger.debug('7.AWAKE Continue')
        except TypeError as exception:
            # A stale page token surfaces as TypeError; restart from the
            # current head of the changes feed.
            page_token = get_start_page_token(creds)
            logger.debug('TYPE ERROR !!!!!!!!!!!!!!!!!!!!')
            logger.debug('Exception:%s', exception)
            logger.debug(traceback.format_exc())
        except Exception as exception:
            logger.debug('Exception:%s', exception)
            logger.debug(traceback.format_exc())
def run(self): print '! STARTING Database Update %s %s %s %s' % ( self.calendar_id, self.start_date, self.end_date, self.flow) storage = Storage('credentials.dat') credentials = storage.get() if credentials is None or credentials.invalid: credentials = run(flow, storage) http = httplib2.Http() http = credentials.authorize(http) service = build('calendar', 'v3', http=http) logging.debug('Opening DB for write') # Open database connection db = MySQLdb.connect(ip_address, db_user, db_pass, db_database) logging.debug('DB open for write') cursor = db.cursor() # prepare a cursor object using cursor() method logging.debug('Enumerating Results for database write') try: events = service.events().list(calendarId=self.calendar_id, maxResults=1000, orderBy='startTime', showDeleted='True', singleEvents='True', timeMax=self.end_date, timeMin=self.start_date).execute() while True: for event in events['items']: Write_DB(db, cursor, event['summary'], event['id'], event['start']['dateTime'], event['end']['dateTime'], event['status']) #print 'found start time %s' % event['end.dateTime'] try: if event['status'] == 'confirmed': #confirmed_list.append(event['status'] + event['summary'] + event['id']) print '>>>>> CONFIRMED Event %s with ID (%s) | Start Time = %s, End Time = %s' % ( event['summary'], event['id'], event['start']['dateTime'], event['end']['dateTime']) if event['status'] == 'cancelled': #cancelled_list.append(event['status'] + event['summary'] + event['id']) print '! 
CANCELLED Event %s with ID (%s) | Start Time = %s, End Time = %s' % ( event['summary'], event['id'], event['start']['dateTime'], event['end']['dateTime']) #print '%s - %s - %s - %s - %s' %(event['status'],event['summary'],event['start.timeZone'],event['end'],event['id']) #print event except KeyError: print 'No Summary' except: print 'Some Exception' page_token = events.get('nextPageToken') if page_token: events = service.events().list( calendarId=calendar_id, maxResults=1000, orderBy='startTime', showDeleted='True', singleEvents='True', timeMax=end_date, timeMin=start_date, pageToken=page_token).execute() else: break except AccessTokenRefreshError: # The AccessTokenRefreshError exception is raised if the credentials # have been revoked by the user or they have expired. print( 'The credentials have been revoked or expired, please re-run' 'the application to re-authorize') #Update_DB(self.calendar_service,self.start_date,self.end_date) logging.debug('Closing DB Connection') db.close() logging.debug('DB connection Closed') print '! Finished Database Update'
def __init__(
        self,
        config_file="C:\Users\Lazada\Google Drive\Analytics team\SK playground\credentials\\config.txt",
        id_mapping_file="C:\Users\Lazada\Google Drive\Analytics team\SK playground\credentials\\id_mapping.csv",
        map_master_file='map_master.xlsx',
        file_tracker='file_tracker.csv'):
    """ initialize the class. requires:
    1. Config txt file that has the API key to access the GA accounts [config.txt]
    2. id_mapping csv file that maps all the accounts to its respective table ids and country [id_mapping.csv]
    3. Map master excel file that has the settings you want to extract [map_master.xlsx]
    4. File tracker csv that it will write the output to check for gdrive download statuses - default is set to file_tracker.csv"""
    self.file_tracker = file_tracker
    # Read in config data file and pass values.
    self.config_file = config_file
    self.config_data = {}
    # BUGFIX: use a context manager so the config file handle is closed
    # even if a malformed line raises (it was left open on exceptions).
    with open(config_file, "r") as text:
        for line in text:
            a = line.split(';')
            self.config_data[a[0]] = a[1].strip()
    self.path_name = self.config_data['path_name']
    self.client_id = self.config_data['client_id']
    self.client_secret = self.config_data['client_secret']
    self.df_id_mapping = pd.read_csv(id_mapping_file)
    self.df_map_master = pd.read_excel(map_master_file, sheetname='Main')
    self.df_map_ga = pd.read_excel(os.path.join(self.path_name,
                                                map_master_file),
                                   sheetname='Main')
    self.df_map_ga = self.df_map_ga.reset_index(drop=True)
    # Join key: country + platform, matched against the id-mapping names.
    self.df_map_ga[
        'map_fk'] = self.df_map_ga.loc[:, 'country'] + '_' + self.df_map_ga.loc[:, 'platform']
    self.df_map_ga = self.df_map_ga.merge(self.df_id_mapping,
                                          how='left',
                                          left_on='map_fk',
                                          right_on='name')
    self.df_map_ga = self.df_map_ga.drop(['Platform'], axis=1)
    self.df_mm = self.df_map_ga.drop(['Country'], axis=1)
    # Setup the logger to log the issues.
    self.logger = logging.getLogger(
        self.config_file.split('.')[0] + "_logger")
    self.logger.setLevel(logging.INFO)
    fh = logging.FileHandler(self.path_name + "\\" +
                             "ga_extractor_logger" + '.log')
    fh.setLevel(logging.INFO)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fh.setFormatter(formatter)
    # add the handlers to logger
    self.logger.addHandler(fh)
    # Authenticate and get a service object; retry up to 4 times since
    # the first OAuth round-trip can fail transiently.
    attempt_1 = 0
    while attempt_1 < 4:
        try:
            # Send in client secret and client ID to the authentication
            # server (configured in the Google developer console with
            # Google Analytics enabled).
            flow = OAuth2WebServerFlow(
                self.client_id, self.client_secret,
                'https://www.googleapis.com/auth/analytics')
            # Credentials are cached in credentials.dat.
            storage = Storage('credentials.dat')
            credentials = storage.get()
            if credentials is None or credentials.invalid:
                credentials = run(flow, storage)
            # On first use the browser pops up to ask for login/consent.
            http = httplib2.Http()
            http = credentials.authorize(http)
            self.service = build('analytics', 'v3', http=http)
            attempt_1 = 100  # sentinel: success, leave the retry loop
        except Exception as e_connection:
            attempt_1 += 1
            self.logger.info('Exception is: ' + str(e_connection))
            self.logger.info('Attempt number ' + str(attempt_1))
            print('Exception is: ' + str(e_connection) + '\n' +
                  'Attempt number ' + str(attempt_1))
            time.sleep(7)
    # Log success in logging in.
    self.logger.info('Authentication successful')
# Set up the OAuth 2.0 Web Server Flow with the information it needs to
# authenticate (it also handles the flow for installed applications).
# The client ID and secret come from this application's credentials page
# in the Google API Console.
FLOW = OAuth2WebServerFlow(
    client_id='1019838388650-nt1mfumr3cltemeq7js8mjitn7a2kuu7.apps.googleusercontent.com',
    client_secret='rx7eaJ-13TiHqOWIiF-Bxu4L',
    scope='https://www.googleapis.com/auth/contacts.readonly',
    user_agent='Smart assistant box')

# Reuse cached credentials from 'info.dat'; if missing or invalid, run
# the installed-application flow, which writes good credentials back.
storage = Storage('info.dat')
credentials = storage.get()
if credentials is None or credentials.invalid == True:
    credentials = run(FLOW, storage)

# Authorize an httplib2.Http object with the good credentials.
http = credentials.authorize(httplib2.Http())

# Build the People API service object used for contact lookups.
people_service = build(serviceName='people', version='v1', http=http)
def commenceGoogle(remoteFolder):
    """ Create a Drive service and upload new local photos under *remoteFolder*.

    Refreshes cached credentials when possible; aborts (returning 0) if
    re-authentication is required, the service cannot be built, a remote
    folder cannot be created, or the Drive quota is exhausted.
    Always returns 0.
    """
    auth_required = True
    # Have we got some credentials already?
    storage = Storage(GOOGLE_CREDENTIALS)
    credentials = storage.get()
    try:
        if credentials:
            # Check for expiry
            if credentials.access_token_expired:
                log('Google token has expired')
                if credentials.refresh_token is not None:
                    credentials.refresh(httplib2.Http())
                    log('Google token refreshed OK')
                    auth_required = False
                else:
                    log('Google refresh_token is None')
            else:
                auth_required = False
        else:
            log('Google could not find or could not access credentials')
    except Exception as e:
        # BUGFIX: was a bare `except:` that hid the failure reason; log
        # the exception so auth problems can be diagnosed, then fall
        # through to the manual re-auth path.
        log(f'Google Cached Auth failed: {e}')
    if auth_required:
        log('STATUS: Aborted. Google requires re-authentication')
        return 0
    else:
        log('Auth NOT required')
    # Get the drive service
    try:
        http_auth = credentials.authorize(httplib2.Http())
        DRIVE = discovery.build('drive', 'v2', http_auth,
                                cache_discovery=False)
    except Exception as e:
        log(f'Error creating Google DRIVE object: {e}')
        log('STATUS: Error creating Google DRIVE object')
        return 0
    newFiles = list_New_Images(PI_PHOTO_DIR, UPLOADED_PHOTOS_LIST)
    numNewFiles = len(newFiles)
    if numNewFiles == 0:
        log('STATUS: No files to upload')
    else:
        numFilesOK = 0
        previousFilePath = ''
        for needupload in newFiles:
            log(f'Uploading {needupload}')
            # Format the destination path to strip the /home/pi/photos off:
            shortPath = makeShortPath(remoteFolder, needupload)
            log(f'ShortPath: {shortPath}')
            remoteFolderTree = os.path.split(shortPath)
            if previousFilePath != remoteFolderTree[0]:
                ImageParentId = None
                # Confirm the tree exists, or build it out:
                foldersList = remoteFolderTree[0].split("/")
                if len(foldersList) != 0:
                    for oneFolder in foldersList:
                        childFolderId = getGoogleFolder(
                            DRIVE, oneFolder, ImageParentId)
                        if childFolderId is None:
                            # Nope, that folder doesn't exist. Create it:
                            newFolderId = createGoogleFolder(
                                DRIVE, oneFolder, ImageParentId)
                            if newFolderId is None:
                                log('Aborted uploading to Google. Error creating newFolder'
                                    )
                                log(f'STATUS: Google upload aborted. {numFilesOK} of {numNewFiles} files uploaded OK'
                                    )
                                return 0
                            else:
                                ImageParentId = newFolderId
                        else:
                            ImageParentId = childFolderId
            # By here we have the destination path id.
            # Now upload the file.
            file_name = remoteFolderTree[1]
            try:
                media = MediaFileUpload(needupload, mimetype='image/jpeg')
                result = DRIVE.files().insert(media_body=media,
                                              body={
                                                  'title': file_name,
                                                  'parents': [{
                                                      u'id': ImageParentId
                                                  }]
                                              }).execute()
                if result is not None:
                    numFilesOK = uploadedOK(needupload, numFilesOK)
                else:
                    log(f"Bad result uploading '{needupload}' to Google: {result}"
                        )
            except Exception as e:
                errorMsg = str(e)
                log(f'Error uploading {needupload} via Google: {errorMsg}')
                if 'returned' in errorMsg:
                    errorReason = errorMsg.split('"')[1]
                    log(f'STATUS: Google error: {errorReason}')
                    if 'The user has exceeded their Drive storage quota' in errorReason:
                        log('Google upload aborted - no space')
                        return 0
            previousFilePath = remoteFolderTree[0]
        log(f'STATUS: {numFilesOK} of {numNewFiles} files uploaded OK')
    return 0
def main():
    """Print today's Google Calendar events across a fixed set of calendars.

    Timed events are listed with their start time; all-day events are
    listed separately. Fixes the mix of Python-2 print statements and
    Python-3 print calls in the same function.
    """
    # The Storage object holds the credentials for a single user; the
    # file is created on first use.
    storage = Storage('auth/credentials.data')
    # get() returns None when no credentials are stored.
    credentials = storage.get()
    # If no credentials are found, or they have expired, run the OAuth
    # flow: tools.run_flow() opens the authorization page in a browser
    # and writes the new credentials back into `storage`.
    if credentials is None or credentials.invalid:
        credentials = tools.run_flow(flow, storage,
                                     tools.argparser.parse_args())
    # Authorize an httplib2.Http object for API calls.
    http = httplib2.Http()
    http = credentials.authorize(http)
    # Build a Calendar v3 service object bound to the authorized http.
    service = build('calendar', 'v3', http=http)
    try:
        calendars = [
            'primary',
            '#[email protected]',
            '*****@*****.**',  # EDT
            '*****@*****.**'  # FAM
        ]
        wAllDay = []
        wEvents = []
        # Window: today 00:00 UTC to tomorrow 00:00 UTC.
        now = datetime.datetime.utcnow().replace(hour=0,
                                                 minute=0,
                                                 second=0,
                                                 microsecond=0)
        tomorrow = now + datetime.timedelta(days=1)
        now = now.isoformat() + 'Z'
        tomorrow = tomorrow.isoformat() + 'Z'
        for calendar in calendars:
            # events().list returns paginated results; loop with
            # list_next() until every page has been processed.
            request = service.events().list(calendarId=calendar,
                                            timeMin=now,
                                            timeMax=tomorrow)
            while request is not None:
                response = request.execute()
                for event in response.get('items', []):
                    # Summary
                    wSummary = event.get('summary', 'NO SUMMARY')
                    # Date: 'dateTime' marks a timed event, 'date' an
                    # all-day event.
                    wDate = event.get('start', None)
                    if 'dateTime' in wDate:
                        wDate = wDate['dateTime']
                        wDate = dateutil.parser.parse(wDate)
                        wEvents.append(
                            wDate.strftime('%H heure %M') + ' : ' + wSummary)
                    elif 'date' in wDate:
                        wAllDay.append(wSummary)
                request = service.events().list_next(request, response)
        if wAllDay or wEvents:
            print("Aujourdhui : ")
            for e in wEvents:
                print(e + '.')
            if wAllDay:
                print("Toute la journée :")
                for e in wAllDay:
                    print(e + '.')
    except AccessTokenRefreshError:
        # Raised if the credentials have been revoked or have expired.
        print(
            'The credentials have been revoked or expired, please re-run'
            'the application to re-authorize')
def main(argv):
    """Fetch Google Latitude location history and print it, oldest first,
    as MongoDB-style documents ({'loc': [lon, lat], 'timestamp': {...}, ...}).
    """
    # Log everything at DEBUG level and above.
    logging.getLogger().setLevel(logging.DEBUG)

    # If the credentials don't exist or are invalid, run the native client
    # flow; Storage writes refreshed credentials back to latitude.dat.
    storage = Storage('latitude.dat')
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = run(FLOW, storage)

    # Authorize an httplib2.Http object with the good credentials.
    http = httplib2.Http()
    http = credentials.authorize(http)

    try:
        # NOTE(review): this reads the module-level `args`, not the `argv`
        # parameter — confirm args is parsed at module scope.
        params = {
            'granularity': 'best',
            'max-results': args.max_results,
            'min-time': args.min_time * 1000,   # API expects milliseconds
            'max-time': args.max_time * 1000,
            'alt': 'json',
        }
        resp, content = http.request(
            "https://www.googleapis.com/latitude/v1/location?" +
            urllib.urlencode(params), "GET")
        if resp['status'] != '200':
            raise Exception("Invalid response %s. (Content follows)\n%s"
                            % (resp['status'], content))

        json_list = json.loads(content)
        if 'items' in json_list['data']:
            last_longitude = None
            last_latitude = None
            # Reverse so the points come out oldest first.
            for loc in reversed(json_list['data']['items']):
                longitude = loc['longitude']
                latitude = loc['latitude']
                # Re-shape each point into a MongoDB-style document:
                # a [lon, lat] pair, a $date timestamp, and a source tag.
                del loc['longitude']
                del loc['latitude']
                loc['loc'] = [longitude, latitude]
                del loc['kind']
                loc['source'] = 'latitude'
                loc['timestamp'] = {"$date": loc['timestampMs']}
                del loc['timestampMs']
                # FIX: compare against None — a previous fix at coordinate
                # 0.0 is valid but falsy, so the old truthiness test wrongly
                # skipped the delta there.
                if (args.include_delta and last_longitude is not None
                        and last_latitude is not None):
                    loc['distance_traveled'] = distance_on_unit_sphere(
                        latitude, longitude, last_latitude, last_longitude)
                    # NOTE(review): 'mph' looks wrong for a unit-sphere
                    # distance — confirm the intended unit.
                    loc['distance_traveled_units'] = 'mph'
                last_longitude = longitude
                last_latitude = latitude
                print(loc)
    except AccessTokenRefreshError:
        # FIX: the two adjacent literals previously concatenated without a
        # space ("re-runthe application").
        print("The credentials have been revoked or expired, please re-run "
              "the application to re-authorize")
def __init__(self, config, args):
    """Construct an attachment reader from a config dict and parsed args.

    Config keys (those marked * are required, the rest have defaults):
        application*          -- application name used for gmail credentials
        credential_file*      -- gmail client-secrets file
        authentication_file*  -- file (under ~/.credentials) caching the token
        label*                -- only unread messages with this label are read
        downloadDirectory*    -- where downloaded attachments are stored
        dispose               -- disposition after success: "read" (mark read),
                                 "trash" (move to trash), or "unlabel"
                                 (remove `label`); default "read"
        checkEverySeconds     -- poll interval; 0 means check once and exit
                                 (default 3600)
        mimeType              -- attachment type prefix to download
                                 (default "image/")
        capture_base64        -- debug: dump raw base64 before decoding
                                 (default False)
        verbose               -- extra output on stdout (default False)
        generatedFilenameFormat -- strftime pattern for generated names
                                 (default "%Y%m%d_%H%M%S")

    `args` comes from argparse and is forwarded to the gmail auth flow.
    """
    # Required settings.
    self._applicationName = config["application"]
    self._credentialFile = config["credential_file"]
    self._tokenFile = config["authentication_file"]
    self._label = config["label"]
    self._downloadDirectory = os.path.expanduser(
        os.path.expandvars(config["downloadDirectory"]))

    # Optional settings with defaults.
    self._dispose = getConfig(config, "dispose", "read")
    self._checkEverySeconds = getConfig(config, "checkEverySeconds", 3600)
    self._mimeType = getConfig(config, "mimeType", "image/")
    self._captureBase64 = getConfig(config, "capture_base64", False)
    self._verbose = getConfig(config, "verbose", False)
    self._generatedFilenameFormat = getConfig(
        config, "generatedFilenameFormat", "%Y%m%d_%H%M%S")

    # The cached auth token lives under ~/.credentials/<authentication_file>.
    tokenDir = os.path.join(os.path.expanduser('~'), '.credentials')
    optionallyCreateDirectory(tokenDir)
    tokenPath = os.path.join(tokenDir, self._tokenFile)

    store = Storage(tokenPath)
    creds = store.get()
    if not creds or creds.invalid:
        # No usable cached token: run the OAuth flow and persist the result.
        oauthFlow = client.flow_from_clientsecrets(
            self._credentialFile,
            "https://www.googleapis.com/auth/gmail.modify")
        oauthFlow.user_agent = self._applicationName
        creds = tools.run_flow(oauthFlow, store, args)
        print('Storing credentials to ' + tokenPath)
        store.put(creds)
    self._credentials = creds

    optionallyCreateDirectory(self._downloadDirectory)
    print(
        "Downloading {} attachments from messages labeled {} to {}".format(
            self._mimeType, self._label, self._downloadDirectory))

    # Worker/service state is lazily initialized elsewhere.
    self._worker = None
    self._service = None
    self._stopRequested = False
def from_store(filepath):
    """Load and return previously saved OAuth credentials from *filepath*.

    Returns None when the credential store holds nothing.
    """
    return Storage(filepath).get()