def __init__(self):
    """Build the Analytics v3 service.

    On App Engine the application's service account asserts its own
    identity; elsewhere the shared helper supplies a locally authorized
    http object.  (A large block of commented-out installed-flow code was
    removed — it duplicated what auth_google_api.get_http_service() does.)
    """
    self._is_gae = os.getenv(
        'SERVER_SOFTWARE', '').startswith('Google App Engine')
    if self._is_gae:
        credentials = AppAssertionCredentials(scope=self._SCOPE)
        http = credentials.authorize(httplib2.Http())
    else:
        # Local development: delegate to the helper that runs/caches the
        # installed-client OAuth flow.
        http = auth_google_api.get_http_service()
    self._service = build('analytics', 'v3', http=http)
def _get_content(self, path, method='POST', body=None):
    """Call the Genomics v1beta REST API and return the decoded JSON.

    Args:
        path: API path relative to .../genomics/v1beta/.
        method: HTTP method (default 'POST').
        body: optional dict sent as the JSON request body.

    Returns:
        The parsed JSON response content.

    Raises:
        ApiException: on timeout, invalid JSON, or an error response.
    """
    scope = [
        'https://www.googleapis.com/auth/genomics'
    ]

    # The API Key is required when deployed to app engine
    api_key = os.environ['API_KEY']

    credentials = AppAssertionCredentials(scope=scope)
    # memcache-backed Http caches discovery docs / tokens between requests.
    http = httplib2.Http(cache=memcache)
    http = credentials.authorize(http)
    try:
        response, content = http.request(
            uri="https://www.googleapis.com/genomics/v1beta/%s?key=%s"
                % (path, api_key),
            method=method,
            body=json.dumps(body) if body else None,
            headers={'Content-Type': 'application/json; charset=UTF-8'})
    except DeadlineExceededError:
        raise ApiException('API fetch timed out')

    try:
        content = json.loads(content)
    except ValueError:
        # Lazy %-args: message is only formatted if the record is emitted.
        logging.error("non-json api content %s", content)
        raise ApiException('The API returned invalid JSON')

    if response.status >= 300:
        logging.error("error api response %s", response)
        logging.error("error api content %s", content)
        if 'error' in content:
            raise ApiException(content['error']['message'])
        else:
            raise ApiException('Something went wrong with the API call!')

    return content
def get_big_query_service():
    """Return the module-cached BigQuery v2 service, building it on first use."""
    global _bigquery_service
    if _bigquery_service is None:
        creds = AppAssertionCredentials(
            scope='https://www.googleapis.com/auth/bigquery')
        authed_http = creds.authorize(httplib2.Http(memcache))
        _bigquery_service = build('bigquery', 'v2', http=authed_http)
    return _bigquery_service
def createDriveService(credential_path=None):
    """
    Create drive service based on service account credentialed from api_key
    in secret_files/config.json.
    Utilizes 'https://www.googleapis.com/auth/drive' scope.
    """
    # Determine if running in local appengine SDK dev_server environment.
    # AppAssertionCredentials does not load in this context.
    # Therefore, service must be mimicked locally using an installed client
    # oauth flow.
    if os.environ.get('SERVER_SOFTWARE') is None:
        os.environ['SERVER_SOFTWARE'] = 'Development'
    if 'Development' in os.environ.get('SERVER_SOFTWARE'):
        # Acquire credentials stored from running `python secret.py` from
        # the commandline.
        if credential_path is None:
            credential_path = secret_cred.CREDENTIALS
        storage = Storage(credential_path)
        credentials = storage.get()
        http = httplib2.Http()
        http = credentials.authorize(http)
        return build('drive', 'v2', http=http)
    else:
        # On App Engine the service account asserts its own identity; only
        # the API key is needed.  (The unused SERVICE_ACCOUNT lookup was
        # removed.)
        api_key = secret_cred.API_KEY
        credentials = AppAssertionCredentials(scope=OAUTH_SCOPE)
        http = httplib2.Http()
        http = credentials.authorize(http)
        return build('drive', 'v2', http=http, developerKey=api_key)
def get_service():
    """Lazily construct and return the shared BigQuery v2 service."""
    global _service
    if not _service:
        creds = AppAssertionCredentials(scope=_BIGQUERY_OAUTH_SCOPE)
        authed = creds.authorize(httplib2.Http())
        _service = discovery.build('bigquery', 'v2', http=authed)
    return _service
def __init__(self, table, date=None):
    """Set up an authenticated Analytics service bound to `table`.

    Args:
        table: the Analytics table id queries will run against.
        date: optional reference date; defaults to today.
    """
    self.today = date if date is not None else datetime.today()
    logger.info("date:" + str(self.today))

    # Increase url fetch deadline for slow Google Analytics API calls
    urlfetch.set_default_fetch_deadline(60)

    # Fetch the API key if we haven't pulled it from the keyfile already
    global api_key
    if api_key == "":
        with open("key.txt", "r") as keyfile:
            api_key = keyfile.read().replace('\n', '')

    # Set Query Range
    self.startdate = self.today - query_range
    self.extended_startdate = self.today - extended_query_range
    self.expdate = self.today - cache_time

    # Setup analytics service authentication
    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/analytics.readonly')
    http_auth = credentials.authorize(Http(memcache))
    self.service = build('analytics', 'v3', http=http_auth,
                         developerKey=api_key)
    self.table_id = table
def DeleteCalendar_func(calendarId):
    """Delete the calendar with the given id using app credentials.

    Args:
        calendarId: id of the calendar to delete.
    """
    credentials = AppAssertionCredentials(
        'https://www.googleapis.com/auth/calendar')
    http_auth = credentials.authorize(Http())
    # One service object is enough; the original also built an identical,
    # unused `calendar` service, which has been removed.
    service = discovery.build('calendar', 'v3', http=http_auth)
    service.calendars().delete(calendarId=calendarId).execute()
def __init__(self):
    """Build the BigQuery v2 service: assertion credentials on GAE,
    otherwise the locally authorized http helper."""
    server_software = os.getenv('SERVER_SOFTWARE', '')
    self._is_gae = server_software.startswith('Google App Engine')
    if self._is_gae:
        creds = AppAssertionCredentials(scope=self._SCOPE)
        authed_http = creds.authorize(httplib2.Http())
    else:
        authed_http = auth_google_api.get_http_service()
    self._service = build('bigquery', 'v2', http=authed_http)
def TestQuery(): """Runs a test query against the measurement-lab BigQuery database. Returns: (string) The query results formatted as an HTML page. """ # Certify BigQuery access credentials. credentials = AppAssertionCredentials( scope='https://www.googleapis.com/auth/bigquery') http = credentials.authorize(httplib2.Http(memcache)) service = build('bigquery', 'v2', http=http) job_runner = service.jobs() # Run a query against the BigQuery database. logging.debug('Query: %s' % TEST_QUERY) jobdata = {'configuration': {'query': {'query': TEST_QUERY}}} insert = job_runner.insert(projectId=PROJECT_ID, body=jobdata).execute() logging.debug('Response: %s' % insert) currentRow = 0 queryReply = job_runner.getQueryResults( projectId=PROJECT_ID, jobId=insert['jobReference']['jobId'], startIndex=currentRow).execute() results = queryReply while 'rows' in queryReply and currentRow < queryReply['totalRows'] : currentRow += len(queryReply['rows']) queryReply = job_runner.getQueryResults( projectId=PROJECT_ID, jobId=queryReply['jobReference']['jobId'], startIndex=currentRow).execute() if 'schema' not in results or 'fields' not in results['schema']: if 'schema' in queryReply and 'fields' in queryReply['schema']: results['schema'] = queryReply['schema'] if 'rows' in queryReply: results['rows'].extend(queryReply['rows']) # Format the results as an HTML page. body = '<h2>The Query</h2><pre>%s</pre>\n<hr>\n' % TEST_QUERY tablerows = '<tr>' for field in results['schema']['fields']: tablerows += '<th>%s</th>' % field['name'] for row in results['rows']: tablerows += '</tr><tr>' for value in row['f']: tablerows += '<td>%s</td>' % value['v'] tablerows += '</tr>' body += '<table border=1>\n%s\n</table>\n' % tablerows return '<!DOCTYPE html><html><body>%s</body></html>' % body
def initService():
    """Build a Calendar v3 service authorized with app credentials.

    NOTE(review): 'YOUR GOOGLE API KEY' is a placeholder — supply a real
    key before deploying.
    """
    api_key = 'YOUR GOOGLE API KEY'
    scope = 'https://www.googleapis.com/auth/calendar'
    authed = AppAssertionCredentials(scope=scope).authorize(
        httplib2.Http(memcache))
    return build("calendar", "v3", http=authed, developerKey=api_key)
def index(request):
    """Run a fixed BigQuery query and render the resulting rows."""
    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/bigquery')
    authed = credentials.authorize(httplib2.Http())
    bigquery_service = build('bigquery', 'v2', http=authed)
    jobs = bigquery_service.jobs()
    query_data = {
        'query': 'SELECT * FROM [maveriks_assessment_sprint_1.test_new]'
    }
    query_response = jobs.query(projectId=PROJECT_NUMBER,
                                body=query_data).execute()
    return render_to_response(
        'AssessingPie_toBeremoved/testmeveriks.html',
        {'result': query_response['rows']},
        context_instance=RequestContext(request))
def createDriveService():
    """Builds and returns a Drive v1 service object authorized with the
    application's service account.

    NOTE(review): the developerKey is hard-coded in source; consider moving
    this secret into configuration.
    """
    scope = 'https://www.googleapis.com/auth/drive'
    authed = AppAssertionCredentials(scope=scope).authorize(httplib2.Http())
    return build('drive', 'v1', http=authed,
                 developerKey='AIzaSyA9j1GWqNWUjpBA6DhRQzAQYeJQalfJSWs')
def build_bq_client():
    """Build a BigQuery v2 client using app-assertion credentials."""
    from googleapiclient.discovery import build
    from oauth2client.appengine import AppAssertionCredentials
    import httplib2

    bq_scope = 'https://www.googleapis.com/auth/bigquery'
    creds = AppAssertionCredentials(scope=bq_scope)
    authed = creds.authorize(httplib2.Http())
    return build('bigquery', 'v2', http=authed)
def _decorated(self, *args, **kwargs):
    """Attach an authorized Cloud Storage service to `self`, then delegate
    to the wrapped function."""
    creds = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/devstorage.full_control')
    authed_http = creds.authorize(httplib2.Http(memcache))
    self.gcs_service = build('storage', 'v1', http=authed_http,
                             developerKey=settings.DEVELOPER_KEY)
    self.gcs_service.BUCKET = settings.BUCKET
    return function(self, *args, **kwargs)
class AppAssertionCredentialsBQClient(_BigQueryClient):
  """BigQuery client implemented with App Assertion Credentials.

  Use this BigQuery client if the application credentials should be used
  for BigQuery transactions.
  """

  def _Connect(self):
    """Certify BigQuery access credentials and build the service."""
    bq_scope = 'https://www.googleapis.com/auth/bigquery'
    self._credentials = AppAssertionCredentials(scope=bq_scope)
    self._http = self._credentials.authorize(httplib2.Http(memcache))
    self._service = build('bigquery', 'v2', http=self._http)
def post(self):
    # Task handler: fetch trace handles, stage them in GCS, and start a GCE
    # instance that processes them for the given job.
    self.response.headers['Content-Type'] = 'text/plain'

    jobid = self.request.get('jobid')
    job = job_info.JobInfo.get_by_id(jobid)
    if not job:
        # Unknown job id: nothing to do.
        return

    # Ask the performance-insights service for a batch of trace handles.
    payload = urllib.urlencode({'q': 'MAX_TRACE_HANDLES=10'})
    query_url = '%s/query?%s' % (_PERFORMANCE_INSIGHTS_URL, payload)
    result = urlfetch.fetch(url=query_url,
                            payload=payload,
                            method=urlfetch.GET,
                            follow_redirects=False,
                            deadline=10)
    logging.info(result.content)

    # Stage the trace list in GCS under a fresh task id so the GCE worker
    # can pick it up.
    taskid = str(uuid.uuid4())
    traces = json.loads(result.content)
    default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                           max_delay=5.0,
                                           backoff_factor=2,
                                           max_retry_period=15)
    gcs_file = gcs.open(_DEFAULT_BUCKET.format(name=taskid),
                        'w',
                        content_type='text/plain',
                        options={},
                        retry_params=default_retry_params)
    gcs_file.write(json.dumps(traces))
    gcs_file.close()

    # Authorize against Compute Engine with the app's service account.
    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/compute')
    http = credentials.authorize(httplib2.Http(memcache))
    compute = build("compute", "v1", http=http)

    startup_script = _STARTUP_SCRIPT.format(revision=job.revision)
    result = self._CreateGCEInstace(compute, 'mr-%s' % jobid, startup_script)

    logging.info('Call to instances().insert response:\n')
    for k, v in sorted(result.iteritems()):
        logging.info(' %s: %s' % (k, v))

    job.status = 'COMPLETE'
    job.put()

    # NOTE(review): success is reported as False even on the happy path —
    # looks suspicious; confirm whether this is intentional.
    response = {'success': False}
    self.response.out.write(json.dumps(response))
def createDriveService1():
    """Builds and returns a Drive v2 service object authorized with the
    application's service account."""
    drive_scope = 'https://www.googleapis.com/auth/drive'
    credentials = AppAssertionCredentials(scope=drive_scope)
    authed = credentials.authorize(httplib2.Http())
    return build('drive', 'v2', http=authed, developerKey=API_KEY)
def search_command(self, message=None):
    """Shorten a wiki search URL built from the message's argument and
    reply with the latest short link."""
    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/urlshortener')
    authed = credentials.authorize(httplib2.Http(memcache))
    service = build("urlshortener", "v1", http=authed)
    credentials.refresh(authed)

    long_url = message.arg
    url = ("http://gymkhana.iitb.ac.in/~ugacademics/wiki/index.php?search="
           + long_url + "&go=Go&title=Special%3ASearch")
    shortened = service.url().insert(body={"longUrl": url}).execute()
    shortened1 = service.url().list().execute()
    message.reply(str(shortened1["items"][0]['id']))
def run(self, csv_output):
    """Start a BigQuery load job for the mapreduce CSV output files."""
    credentials = AppAssertionCredentials(scope=SCOPE)
    authed_http = credentials.authorize(httplib2.Http())
    bq = build("bigquery", "v2", http=authed_http)
    jobs = bq.jobs()
    table_name = 'datastore_data_%s' % datetime.datetime.utcnow().strftime(
        '%m%d%Y_%H%M%S')
    files = []
    for output_path in csv_output:
        files.append(str(output_path.replace('/gs/', 'gs://')))
    jobs.insert(projectId=PROJECT_ID,
                body=build_job_data(table_name, files)).execute()
def run(self, csv_output):
    """Kick off a BigQuery load job that ingests the given CSV outputs."""
    creds = AppAssertionCredentials(scope=SCOPE)
    bq_service = build("bigquery", "v2",
                       http=creds.authorize(httplib2.Http()))
    timestamp = datetime.datetime.utcnow().strftime('%m%d%Y_%H%M%S')
    table_name = 'datastore_data_%s' % timestamp
    gs_files = [str(path.replace('/gs/', 'gs://')) for path in csv_output]
    job = bq_service.jobs().insert(
        projectId=PROJECT_ID, body=build_job_data(table_name, gs_files))
    job.execute()
def sendToBQ(project_id, dataset_id, table_id, csv_name, schema):
    """Load a CSV that already lives in Cloud Storage into BigQuery."""
    source_csv = "gs://" + csv_name
    creds = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/bigquery')
    authed = creds.authorize(httplib2.Http(memcache))
    bq = build("bigquery", "v2", http=authed)
    loadTable(bq, project_id, dataset_id, table_id, source_csv, schema)

###############################################################################
def sendToBQ(project_id, dataset_id, table_id, csv_name, schema):
    """Trigger a BigQuery table load from the named GCS CSV object."""
    bq_scope = 'https://www.googleapis.com/auth/bigquery'
    credentials = AppAssertionCredentials(scope=bq_scope)
    service = build("bigquery", "v2",
                    http=credentials.authorize(httplib2.Http(memcache)))
    loadTable(service, project_id, dataset_id, table_id,
              "gs://" + csv_name, schema)

###############################################################################
def createDriveService():
    """
    Builds and returns a Drive service object authorized with the
    application's service account.

    Returns:
      Drive service object.
    """
    from oauth2client.appengine import AppAssertionCredentials
    from apiclient.discovery import build

    creds = AppAssertionCredentials(
        scope="https://www.googleapis.com/auth/drive")
    authed = creds.authorize(httplib2.Http())
    return build("drive", "v2", http=authed, developerKey=API_KEY)
def search_command(self, message=None):
    """Reply with a short link for a wiki search on the message text."""
    scope = 'https://www.googleapis.com/auth/urlshortener'
    credentials = AppAssertionCredentials(scope=scope)
    http = credentials.authorize(httplib2.Http(memcache))
    service = build("urlshortener", "v1", http=http)
    credentials.refresh(http)
    long_url = message.arg
    url = ("http://gymkhana.iitb.ac.in/~ugacademics/wiki/index.php?search="
           + long_url + "&go=Go&title=Special%3ASearch")
    # Register the short link, then reply with the newest entry from the
    # account's link history.
    service.url().insert(body={"longUrl": url}).execute()
    latest = service.url().list().execute()
    message.reply(str(latest["items"][0]['id']))
def index(request):
    """Query BigQuery for every row of the test table and render them."""
    creds = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/bigquery')
    http = creds.authorize(httplib2.Http())
    service = build('bigquery', 'v2', http=http)
    body = {
        'query': 'SELECT * FROM [maveriks_assessment_sprint_1.test_new]'
    }
    query_response = service.jobs().query(projectId=PROJECT_NUMBER,
                                          body=body).execute()
    return render_to_response('AssessingPie_toBeremoved/testmeveriks.html',
                              {'result': query_response['rows']},
                              context_instance=RequestContext(request))
def createDriveService():
    """Builds and returns a Drive v1 service object authorized with the
    application's service account.

    NOTE(review): the developerKey below is a hard-coded secret in source.
    """
    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/drive')
    authed_http = credentials.authorize(httplib2.Http())
    return build('drive', 'v1', http=authed_http,
                 developerKey='AIzaSyA9j1GWqNWUjpBA6DhRQzAQYeJQalfJSWs')
def get(self):
    """Render the calendar status page listing the account's calendars."""
    if not super(CalendarStatus, self).get():
        return
    credentials = AppAssertionCredentials(
        'https://www.googleapis.com/auth/calendar')
    http_auth = credentials.authorize(Http())
    # One service object suffices; the original built an identical unused
    # `calendar` service and an unused `item` dict, both removed.
    service = discovery.build('calendar', 'v3', http=http_auth)
    template = JINJA_ENVIRONMENT.\
        get_template('templates/calendar_status.html')
    calendars = service.calendarList().list().execute()
    self.render_data['calendars'] = []
    for calendar in calendars['items']:
        self.render_data['calendars'].append(calendar)
    self.response.write(template.render(self.render_data))
def post(self):
    """Task-queue handler: delete a single object from Cloud Storage."""
    creds = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/devstorage.full_control')
    http = creds.authorize(httplib2.Http(memcache))
    gcs_service = build('storage', 'v1', http=http,
                        developerKey=settings.DEVELOPER_KEY)
    object_name = self.request.get('object_name')
    bucket_name = self.request.get('bucket_name')
    logging.info('delete tasks qeueu: delete gcs: %s/%s'
                 % (bucket_name, object_name))
    try:
        req = gcs_service.objects().delete(
            bucket=bucket_name, object=object_name.encode('utf8'))
        req.execute()
    except HttpError as error:
        # Best effort: a failed delete is logged, not retried here.
        logging.info(error)
def CreateEvent_func(calendar_id, summary, start_time, end_time, description):
    """Insert an event into the given calendar and return its id.

    Args:
        calendar_id: target calendar id.
        summary: event title.
        start_time, end_time: 'YYYY-MM-DDTHH:MM' strings from the HTML form.
        description: event description.

    Returns:
        The id of the created event.
    """
    # Auth part — needed everywhere.  A single service object is enough;
    # the original also built an identical unused `calendar` service.
    credentials = AppAssertionCredentials(
        'https://www.googleapis.com/auth/calendar')
    http_auth = credentials.authorize(Http())
    service = discovery.build('calendar', 'v3', http=http_auth)
    event = {
        'summary': summary,
        'start': {
            # The HTML form returns a slightly different format, so append
            # seconds and the +03:00 (Moscow) offset.
            'dateTime': start_time + ':00+03:00'
        },
        'end': {
            'dateTime': end_time + ':00+03:00'
        },
        'description': description
    }
    event = service.events().insert(calendarId=calendar_id,
                                    body=event).execute()
    return event['id']
def google_api_access(scope):
    """Return (decorator_factory, http) for the given OAuth scope.

    The decorator retries a wrapped call once after re-authorizing when the
    access token has been revoked or has expired.
    """
    credentials = AppAssertionCredentials(scope=scope)
    http = credentials.authorize(httplib2.Http(memcache, 60 * 9))

    def token_refresh_decorator(wrapped):
        def inner(*args, **kwargs):
            try:
                return wrapped(*args, **kwargs)
            except client.AccessTokenRefreshError:
                logging.warn(
                    "The credentials have been revoked or expired, refreshing the token",
                    exc_info=True)
                credentials.authorize(http)
                return wrapped(*args, **kwargs)
        return inner

    return token_refresh_decorator, http
def _get_http_auth():
    """Get an authorized `httplib2.Http` object.

    Detect if we are running on appengine or locally and authorize
    accordingly.  (If locally, a json file with service account credentials
    is searched for in the project root directory.)  The service has
    read/write access.
    """
    if _IS_APPENGINE:
        credentials = AppAssertionCredentials(scope=_SCOPES)
    else:
        # Look for credentials in a file named "gcs_credentials.json"
        # in the project's root directory.
        root_dir = os.path.abspath(os.path.join(*(_LOCATION, os.path.pardir)))
        with open(os.path.join(root_dir, 'gcs_credentials.json')) as _infile:
            creds = json.load(_infile)
        credentials = SignedJwtAssertionCredentials(
            creds['client_email'], creds['private_key'], scope=_SCOPES)
    return credentials.authorize(httplib2.Http())
def _get_content(self, path, method='POST', body=None):
    """Call the Genomics v1beta API and return the decoded JSON content.

    Raises:
        ApiException: on timeout, undecodable JSON, or an error status.
    """
    scope = [
        'https://www.googleapis.com/auth/genomics',
        'https://www.googleapis.com/auth/devstorage.read_write'
    ]
    api_key = os.environ['API_KEY']
    credentials = AppAssertionCredentials(scope=scope)
    http = httplib2.Http()
    http = credentials.authorize(http)
    try:
        response, content = http.request(
            uri="https://www.googleapis.com/genomics/v1beta/%s?key=%s"
                % (path, api_key),
            method=method,
            body=json.dumps(body) if body else None,
            headers={'Content-Type': 'application/json; charset=UTF-8'})
    except DeadlineExceededError:
        raise ApiException('API fetch timed out')

    # Log results to debug
    logging.debug("Response:")
    logging.debug(response)
    logging.debug("Content:")
    logging.debug(content)

    # Parse the content as json.  Guard against non-JSON bodies (e.g. HTML
    # error pages) instead of letting a raw ValueError escape.
    try:
        content = json.loads(content)
    except ValueError:
        raise ApiException('The API returned invalid JSON')

    if response.status == 404:
        raise ApiException('API not found')
    elif response.status == 400:
        raise ApiException('API request malformed')
    elif response.status != 200:
        if 'error' in content:
            logging.error("Error Code: %s Message: %s",
                          content['error']['code'],
                          content['error']['message'])
        raise ApiException("Something went wrong with the API call. "
                           "Please check the logs for more details.")
    return content
def _get_content(self, path, method='POST', body=None):
    """Call the Genomics v1beta API (memcache-backed http) and return the
    decoded JSON content.

    Raises:
        ApiException: on timeout, undecodable JSON, or an error status.
    """
    # Genomics requires both the genomics scope and devstorage
    scope = [
        'https://www.googleapis.com/auth/genomics',
        'https://www.googleapis.com/auth/devstorage.read_write'
    ]
    # The API Key may or may not be required.
    api_key = os.environ['API_KEY']
    credentials = AppAssertionCredentials(scope=scope)
    http = httplib2.Http(cache=memcache)
    http = credentials.authorize(http)
    try:
        response, content = http.request(
            uri="https://www.googleapis.com/genomics/v1beta/%s?key=%s"
                % (path, api_key),
            method=method,
            body=json.dumps(body) if body else None,
            headers={'Content-Type': 'application/json; charset=UTF-8'})
    except DeadlineExceededError:
        raise ApiException('API fetch timed out')

    # Log results to debug
    logging.debug("Response:")
    logging.debug(response)
    logging.debug("Content:")
    logging.debug(content)

    # Parse the content as json.  Guard against non-JSON bodies (e.g. HTML
    # error pages) instead of letting a raw ValueError escape.
    try:
        content = json.loads(content)
    except ValueError:
        raise ApiException('The API returned invalid JSON')

    if response.status == 404:
        raise ApiException('API not found')
    elif response.status == 400:
        raise ApiException('API request malformed')
    elif response.status != 200:
        if 'error' in content:
            logging.error("Error Code: %s Message: %s",
                          content['error']['code'],
                          content['error']['message'])
        raise ApiException("Something went wrong with the API call. "
                           "Please check the logs for more details.")
    return content
def CreateCalendar_func(group):
    """Create a public, read-only Moscow-timezone calendar named after
    `group` and return its id.

    Args:
        group: name used as the calendar summary.

    Returns:
        The id of the created calendar.
    """
    credentials = AppAssertionCredentials(
        'https://www.googleapis.com/auth/calendar')
    http_auth = credentials.authorize(Http())
    # One service object is enough; the original also built an unused
    # duplicate and then reused the name `calendar` for the resource body,
    # shadowing the service.
    service = discovery.build('calendar', 'v3', http=http_auth)
    calendar_body = {
        'summary': group,
        'timeZone': 'Europe/Moscow'
    }
    created_calendar = service.calendars().insert(
        body=calendar_body).execute()
    cal_id = created_calendar['id']
    # Make the calendar readable by everyone.
    rule = {
        'scope': {
            'type': 'default'
        },
        'role': 'reader'
    }
    service.acl().insert(calendarId=cal_id, body=rule).execute()
    return cal_id
def read_edm_file(self, edm_object_name):
    """Return the EDM object's bytes, from memcache when possible,
    otherwise downloaded from Cloud Storage (and then cached).

    Args:
        edm_object_name: name of the object in settings.BUCKET.

    Returns:
        The object's content as bytes.
    """
    # Check the cache first so we don't build credentials and a service
    # object for content we already have.
    data = memcache.get(edm_object_name)
    if data is not None:
        return data

    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/devstorage.full_control')
    http = credentials.authorize(httplib2.Http(memcache))
    gcs_service = build('storage', 'v1', http=http,
                        developerKey=settings.DEVELOPER_KEY)
    fh = io.BytesIO()
    request = gcs_service.objects().get_media(
        bucket=settings.BUCKET, object=edm_object_name.encode('utf8'))
    downloader = MediaIoBaseDownload(fh, request,
                                     chunksize=settings.CHUNKSIZE)
    done = False
    while not done:
        status, done = downloader.next_chunk()
        if status:
            logging.info('Download %d%%.' % int(status.progress() * 100))
    logging.info('Download %s Complete!' % edm_object_name)
    data = fh.getvalue()
    memcache.add(edm_object_name, data,
                 settings.EDM_CONTENT_MEMCACHE_TIME)
    return data
def images():
    """Create a resumable Cloud Storage upload session for an image and
    return the session's response headers as JSON.

    Expects `key`, `name` and `length` query parameters; responds 401 when
    they are missing or the key is wrong.
    """
    # Use .get() so a missing parameter yields the intended 401 instead of
    # the framework's KeyError/BadRequest from direct indexing.
    if not request.args or request.args.get('key') != IOS_API_KEY:
        abort(401)
    if not request.args.get('name') or not request.args.get('length'):
        logging.error("name: %s", request.args.get('name'))
        logging.error("length: %s", request.args.get('length'))
        abort(401)
    logging.info(request.args)
    name = request.args['name']
    length = request.args['length']
    if request.method == 'GET':
        credentials = AppAssertionCredentials(
            scope='https://www.googleapis.com/auth/devstorage.read_write')
        http_auth = credentials.authorize(httplib2.Http())
        logging.info("successfully created http_auth object")
        try:
            url = ("https://www.googleapis.com/upload/storage/v1/b/"
                   + _BUCKET_NAME + "/o?uploadType=resumable&name=" + name)
            resp, content = http_auth.request(
                url,
                method="POST",
                headers={'Content-Length': '0',
                         'Content-Type': 'application/json; charset=UTF-8',
                         'X-Upload-Content-Type': 'image/jpeg',
                         'X-Upload-Content-Length': length})
            return jsonify(resp)
        except HttpError as e:
            logging.error("response status: %s", e.resp.status)
            logging.error("response content: %s", e.content)
def _create_storage_api(self):
    """Build a Cloud Storage v1 service with full-control credentials."""
    scope = "https://www.googleapis.com/auth/devstorage.full_control"
    credentials = AppAssertionCredentials(scope=scope)
    return build("storage", "v1",
                 http=credentials.authorize(httplib2.Http(memcache)))
def service():
    """Return a freshly built BigQuery v2 service."""
    creds = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/bigquery')
    return build("bigquery", "v2",
                 http=creds.authorize(httplib2.Http(memcache)))
def Authorize():
    """Return an Http object authorized for Compute Engine access."""
    gce_credentials = AppAssertionCredentials(scope=GCE_SCOPE)
    return gce_credentials.authorize(httplib2.Http(memcache))
__author__ = '[email protected] (Michael Manoochehri)' import httplib2 from apiclient.discovery import build from google.appengine.ext import webapp from google.appengine.ext.webapp.util import run_wsgi_app from oauth2client.appengine import AppAssertionCredentials # BigQuery API Settings SCOPE = 'https://www.googleapis.com/auth/bigquery' PROJECT_ID = 'XXXXXXXXXX' # REPLACE WITH YOUR Project ID # Create a new API service for interacting with BigQuery credentials = AppAssertionCredentials(scope=SCOPE) http = credentials.authorize(httplib2.Http()) bigquery_service = build('bigquery', 'v2', http=http) class StartQueryHandler(webapp.RequestHandler): def post(self): query_string = self.request.get('query') jobCollection = bigquery_service.jobs() jobData = { 'configuration': { 'query': { 'query': query_string, } } } try:
def get_gce_service():
    """Build a Compute Engine service (memcache-backed Http, 30s timeout)."""
    creds = AppAssertionCredentials(scope=GCE_SCOPE)
    cached_http = creds.authorize(httplib2.Http(memcache, 30))
    return discovery.build('compute', API_VERSION, http=cached_http)
from apiclient.errors import HttpError


def get_constants(constant):
    """Look up `constant` in constants.json."""
    with open('constants.json') as json_data:
        return json.load(json_data)[constant]


# BigQuery API Settings
SCOPE = get_constants('SCOPE')
projectId = get_constants('projectId')
datasetId = get_constants('datasetId')

# Create a new API service for interacting with BigQuery
credentials = AppAssertionCredentials(scope=SCOPE)
http = credentials.authorize(httplib2.Http())
bigquery_service = build('bigquery', 'v2', http=http)


# Insert data in BigQuery
def insert_to_bq(self, targetTableId, body):
    """Stream `body` rows into the target table.

    Returns the API error content on failure, None otherwise.
    """
    try:
        bigquery_service.tabledata().insertAll(
            projectId=projectId,
            datasetId=datasetId,
            tableId=targetTableId,
            body=body).execute()
    except HttpError as err:
        if err.content:
            return err.content
def __init__(self):
    """Create an authorized BigQuery v2 client for this instance."""
    # Create a new API service for interacting with BigQuery
    creds = AppAssertionCredentials(scope=SCOPE)
    self.http = creds.authorize(httplib2.Http())
    self.service = build('bigquery', 'v2', http=self.http)
class GaeBigQueryClient(big_query_client.BigQueryClient): """Client for interacting with BigQuery, within app engine authentication.""" def __init__(self, env=None, project_id=None): """Overrides the default init to remove the need for credentials.""" super(GaeBigQueryClient, self).__init__(credential_file='', env=env, project_id=project_id) def _InitializeHttp(self): """Initializes the http provider.""" self._credentials = AppAssertionCredentials( scope=big_query_client.SCOPE) self._http = self._credentials.authorize(httplib2.Http()) def _GetFromCache(self, key): """Retrieves a value from the cache based on a key. Args: key: A unique key that identifies the item in the cache. Returns: Cached data if found, None if not. """ return memcache.get(key) def _AddToCache(self, key, value, duration=None): """Adds a value to the cache. Args: key: A unique key that identifies the item in the cache. value: The value to store. duration: The length of time (in seconds) to store the cached value. """ memcache.add(key, value, duration or DEFAULT_CACHE_DURATION) def Query(self, query, timeout=None, cache_duration=None, use_cache=True): """Returns cached data, or issues a Big Query and returns the response. Note that multiple pages of data will be loaded returned as a single data set. Args: query: The query to issue. timeout: The length of time (in seconds) to wait before checking for job completion. cache_duration: The length of time (in seconds) to store the result in the cache. use_cache: If false, do not use the cache. Returns: The query results. 
See big query's docs for the results format: http://goto.google.com/big_query_query_results """ if not use_cache: return super(GaeBigQueryClient, self).Query(query, timeout) query_hash = hashlib.md5(self.project_id + query).hexdigest() data = self._GetFromCache(query_hash) if data is None: data = super(GaeBigQueryClient, self).Query(query, timeout) try: self._AddToCache(query_hash, data, cache_duration) except ValueError, err: logging.error('Failed to save results to the cache: %s', err) else:
def authorize_request():
    """Return an Http object authorized with Drive-scope app credentials."""
    drive_scope = 'https://www.googleapis.com/auth/drive'
    return AppAssertionCredentials(drive_scope).authorize(Http())
def finish(self, more):
    # Flush the in-memory log buffer to Cloud Storage.  With more=True an
    # intermediate chunk object is written; with more=False the collected
    # chunks are composed into the final dump object (or, if there were no
    # chunks, the buffer is uploaded directly).
    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/devstorage.full_control')
    http = credentials.authorize(httplib2.Http(memcache))
    gcs_service = build('storage', 'v1', http=http,
                        developerKey=settings.DEVELOPER_KEY)
    if more:
        # Upload the current buffer as a numbered chunk and reset it.
        name, ext = os.path.splitext(self.filename)
        media = MediaIoBaseUpload(self.fh, mimetype='text/plain',
                                  chunksize=1024 * 1024, resumable=True)
        object_name = 'dump/{}-{:d}.txt'.format(name, self.chunks)
        object_resource = {'name': object_name}
        req = gcs_service.objects().insert(bucket=settings.BUCKET,
                                           body=object_resource,
                                           media_body=media)
        req.execute()
        self.chunks += 1
        self.fh = io.BytesIO()
        self.sourceObjects.append({'name': object_name})
    else:
        if self.sourceObjects:
            # Compose all uploaded chunks into the final dump object.
            composite_object_resource = {'contentType': 'text/plain'}
            compose_req_body = {
                'sourceObjects': self.sourceObjects,
                'destination': composite_object_resource
            }
            req = gcs_service.objects().compose(
                destinationBucket=settings.BUCKET,
                destinationObject='dump/{}'.format(self.filename),
                body=compose_req_body)
            req.execute()
            # Record the dump's public path on the schedule entity.
            schedule = self.schedule_key.get()
            schedule.send_recipients_log = '/{0}/{1}'.format(
                settings.BUCKET,
                urllib.quote('dump/{}'.format(self.filename)))
            schedule.put()
            # delete chunks
            for object_name in self.sourceObjects:
                gcs_service.objects().delete(
                    bucket=settings.BUCKET,
                    object=object_name.get('name')).execute()
        else:
            if self.fh.getvalue():
                # No chunks were written: upload the buffer in one piece.
                media = MediaIoBaseUpload(self.fh, mimetype='text/plain',
                                          chunksize=1024 * 1024,
                                          resumable=True)
                name = 'dump/{}'.format(self.filename)
                object_resource = {'name': name}
                req = gcs_service.objects().insert(bucket=settings.BUCKET,
                                                   body=object_resource,
                                                   media_body=media)
                req.execute()
                schedule = self.schedule_key.get()
                schedule.send_recipients_log = '/{0}/{1}'.format(
                    settings.BUCKET,
                    urllib.quote('dump/{}'.format(self.filename)))
                schedule.put()
            else:
                # Nothing buffered at all — skip the dump entirely.
                logging.info('empty logEmail, cancel dump')
                return
    logging.info('logEmail dump(%d) ok' % self.count)
def get_gcs_service():
    """Build a Cloud Storage service (memcache-backed Http, 30s timeout)."""
    creds = AppAssertionCredentials(scope=STORAGE_API_SCOPE)
    authed = creds.authorize(httplib2.Http(memcache, 30))
    return discovery.build('storage', STORAGE_API_VERSION, http=authed)
def build_calendar_service():
    """Return a Calendar v3 service using app-assertion credentials."""
    calendar_scope = 'https://www.googleapis.com/auth/calendar'
    creds = AppAssertionCredentials(scope=calendar_scope)
    return discovery.build('calendar', 'v3',
                           http=creds.authorize(httplib2.Http()))
import httplib2
from oauth2client.appengine import AppAssertionCredentials
from apiclient import discovery
from google.appengine.api import memcache

# Modify the next line to specify your BigQuery project id.
PROJECT_ID = 'bigquery-e2e'

# Module-level BigQuery handle shared by the request handlers.
credentials = AppAssertionCredentials(
    scope='https://www.googleapis.com/auth/bigquery')
_authed_http = credentials.authorize(httplib2.Http(memcache))
bigquery = discovery.build('bigquery', 'v2', http=_authed_http)
def __init__(self):
    """Authorize app credentials and build the BigQuery v2 client."""
    bq_scope = 'https://www.googleapis.com/auth/bigquery'
    credentials = AppAssertionCredentials(bq_scope)
    self.http = credentials.authorize(httplib2.Http())
    self.bigquery = discovery.build('bigquery', 'v2', http=self.http)
def __init__(self):
    """Build the BigQuery client on an http authorized with app
    assertion credentials."""
    credentials = AppAssertionCredentials(
        'https://www.googleapis.com/auth/bigquery'
    )
    self.http = credentials.authorize(httplib2.Http())
    self.bigquery = discovery.build('bigquery', 'v2', http=self.http)
def getBigQueryService(scope=BQ_SCOPE):
    """Create a new API service for interacting with BigQuery."""
    creds = AppAssertionCredentials(scope=scope)
    return build('bigquery', 'v2',
                 http=creds.authorize(httplib2.Http()))
from oauth2client.appengine import AppAssertionCredentials from apiclient.http import BatchHttpRequest import mapreduce.third_party.pipeline as pipeline import mapreduce.third_party.pipeline.common as pipeline_common import logging logger = logging.getLogger('pipeline') credentials = AppAssertionCredentials( scope=[ 'https://www.googleapis.com/auth/prediction', 'https://www.googleapis.com/auth/devstorage.full_control' ] ) http = credentials.authorize(httplib2.Http(memcache)) service = build('bigquery', 'v2', http=http) batch = BatchHttpRequest() class Check(base_handler.PipelineBase): def run(self, projectId, modelId, delays=10): result = service.trainedmodels().get( project=projectId, id=modelId ).execute() if result['trainingStatus'] == "RUNNING": delay = yield pipeline_common.Delay(seconds=delays) with pipeline.After(delay): yield PredictCheck(projectId, modelId, delays) else:
def authorize_request():
    """Return an Http authorized with the app's Drive-scope credentials."""
    credentials = AppAssertionCredentials(
        'https://www.googleapis.com/auth/drive')
    authed_http = credentials.authorize(Http())
    return authed_http