def gcsFunction1(fileName=None, data=None):
    """Decode a base64 data-URL payload and write it to Google Cloud Storage.

    Args:
        fileName: target object name inside the default bucket.
        data: a data-URL string of the form "<header>,<base64 payload>".

    Returns:
        True on success.

    Raises:
        Exception: (500, message) wrapper on any storage failure.
    """
    # Retries help overcome transient urlfetch/GCS issues such as timeouts.
    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                              max_delay=5.0,
                                              backoff_factor=2,
                                              max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    # Resolve the target bucket (env override, else the app default bucket).
    bucket_name = os.environ.get('BUCKET_NAME',
                                 app_identity.get_default_gcs_bucket_name())
    bucket = '/' + bucket_name
    filename = bucket + '/' + fileName
    # BUG FIX: guess_type returns a (type, encoding) tuple; only the type
    # string is a valid content_type value (the original passed the tuple).
    content_t = mimetypes.guess_type(data)[0]
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    try:
        gcs_file = gcs.open(filename, 'w',
                            content_type=content_t,
                            options={'x-goog-meta-filename': fileName},
                            retry_params=write_retry_params)
        # Keep only the payload after the "data:<mime>;base64," prefix.
        data = data.split(',')[1]
        # Decode to the raw bytes expected by the storage layer.
        data = base64.b64decode(data)
        gcs_file.write(data)
        gcs_file.close()
    except Exception as e:
        logging.exception(e)
        # BUG FIX: str(e) — the original concatenated str + Exception, which
        # raises TypeError inside the handler and masks the real error.
        raise Exception(500, "Server Error:" + str(e))
    return (True)
def setUp(self):
    """Initialise the App Engine testbed stubs and seed baseline entities.

    Boots every service stub the tests touch, disables cloudstorage's
    default retry behaviour, then creates one enterprise, its owner user,
    a speedometer sensor type and a first vehicle sensor.
    """
    self.set_application(tst_app)
    self.setup_testbed()
    # Service stubs required by the code under test.
    self.init_datastore_stub()
    self.init_memcache_stub()
    self.init_taskqueue_stub()
    self.init_app_identity_stub()
    self.init_mail_stub()
    self.register_search_api_stub()
    self.init_urlfetch_stub()
    self.init_blobstore_stub()
    self.init_modules_stub()
    # No retries: keeps storage behaviour deterministic in tests.
    cloudstorage.set_default_retry_params(None)

    # Create enterprise, sensortype and sensor
    self.e = Enterprise.Create()
    self.e.Update(name="Test Ent", timezone="Africa/Nairobi")
    self.e.put()
    self.owner = User.Create(self.e, phone=OWNER_NUM, notify=False)
    self.owner.Update(name=OWNER_NAME, currency="KES")
    self.owner.put()
    self.spedometer = SensorType.Create(self.e)
    schema = {'speed': {'unit': 'kph'}, 'narrative': {'type': 'string'}}
    self.spedometer.Update(name="Spedometer", schema=json.dumps(schema))
    self.spedometer.put()
    self.vehicle_1 = Sensor.Create(self.e, TEST_SENSOR_ID,
                                   self.spedometer.key().id())
    self.vehicle_1.Update(sensortype_id=self.spedometer.key().id(),
                          name="Vehicle Sensor 1")
    self.vehicle_1.put()
def upload(request):
    """Export all Elements rows as CSV and upload the file to GCS.

    Failures are logged and swallowed (best-effort upload, per the
    original pylint broad-except marker).

    Args:
        request: the incoming Django request.
    """
    qs = models.Elements.objects.all()
    filename = djqscsv.generate_filename(qs, append_datestamp=True)
    # Retries help overcome transient urlfetch/GCS issues such as timeouts.
    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                              max_delay=5.0,
                                              backoff_factor=2,
                                              max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    # Resolve the target bucket (env override, else the app default bucket).
    bucket_name = os.environ.get('BUCKET_NAME',
                                 app_identity.get_default_gcs_bucket_name())
    bucket = '/' + bucket_name
    file_obj = djqscsv.render_to_csv_response(qs, filename)
    try:
        write_retry_params = gcs.RetryParams(backoff_factor=1.1)
        gcs_file = gcs.open(bucket + '/' + filename, 'w',
                            content_type='text/csv',
                            options={'x-goog-meta-foo': 'foo',
                                     'x-goog-meta-bar': 'bar'},
                            retry_params=write_retry_params)
        gcs_file.write(file_obj.content)
        gcs_file.close()
    # FIX: "except Exception as e" replaces the Python-2-only
    # "except Exception, e" form; "as" works on Python 2.6+ and Python 3.
    except Exception as e:  # pylint: disable=broad-except
        logging.exception(e)
def CreateFile(self, nombre, datos): my_default_retry_params = gcs.RetryParams(initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15) gcs.set_default_retry_params(my_default_retry_params) bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name()) #bucket_name = os.environ.get('BUCKET_NAME', 'prueba') #print bucket_name #bucket_name = 'prueba' bucket = '/' + bucket_name filename = bucket + '/' + nombre print 'filename: '+filename #https://cloud.google.com/appengine/docs/python/googlecloudstorageclient/functions write_retry_params = gcs.RetryParams(backoff_factor=1.1) gcs_file = gcs.open(filename, 'w', content_type='image/jpeg', options={'x-goog-meta-foo': 'foo', 'x-goog-meta-bar': 'bar', 'x-goog-acl': 'public-read'}, retry_params=write_retry_params) gcs_file.write(datos) gcs_file.close() blobstore_filename = '/gs' + filename key = blobstore.create_gs_key(blobstore_filename) #Si se encuentra en el servidor de Google if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'): return 'http://storage.googleapis.com'+filename #Si está en entorno de desarrollo local: else: return get_serving_url(key)
def __init__(self):
    """Install conservative default GCS retry parameters.

    Retries help overcome transient urlfetch/GCS issues such as timeouts.
    """
    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                              max_delay=5.0,
                                              backoff_factor=2,
                                              max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    # FIX: removed a dead trailing "pass" — it is redundant after real
    # statements in the body.
def main():
    """Install default GCS retry behaviour, then serve the WSGI app."""
    retry = cloudstorage.RetryParams(initial_delay=0.5,
                                     max_delay=30.0,
                                     backoff_factor=2,
                                     urlfetch_timeout=60)
    cloudstorage.set_default_retry_params(retry)
    run_wsgi_app(application)
def __init__(self, *args, **kwargs):
    """Construct the handler and install token-caching GCS retry defaults.

    webapp framework invokes initialize after __init__.
    webapp2 framework invokes initialize within __init__.
    Python27 runtime swap webapp with webapp2 underneath us.
    Since initialize will conditionally change this field,
    it needs to be set before calling super's __init__.
    """
    self._preprocess_success = False
    super(TaskQueueHandler, self).__init__(*args, **kwargs)
    # cloudstorage may be unavailable; only configure retries when present.
    if cloudstorage:
        cloudstorage.set_default_retry_params(
            cloudstorage.RetryParams(save_access_token=True))
def setUp(self):
    """Activate the testbed with storage stubs and empty the test bucket."""
    self.storage = default_storage
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    # Stubs needed by cloudstorage and the storage backend under test.
    self.testbed.init_urlfetch_stub()
    self.testbed.init_app_identity_stub()
    self.testbed.init_files_stub()
    self.testbed.init_blobstore_stub()
    # No retries: keeps tests fast and deterministic.
    cloudstorage.set_default_retry_params(None)

    # cleanup storage stub
    for elem in cloudstorage.listbucket('/test_bucket/'):
        cloudstorage.delete(elem.filename)
def setUp(self):
    """Activate App Engine stubs and shrink GCS listing pages for tests."""
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_app_identity_stub()
    self.testbed.init_blobstore_stub()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_urlfetch_stub()
    # Force bucket listings to page after 2 results; the old value is kept
    # on self._old_max_keys for restoration (presumably in tearDown —
    # not visible here).
    self._old_max_keys = common._MAX_GET_BUCKET_RESULT
    common._MAX_GET_BUCKET_RESULT = 2
    self.start_time = time.time()
    # No retries: keeps storage behaviour deterministic in tests.
    cloudstorage.set_default_retry_params(None)
def gcsWrite(cont, iden, cType="file", fileName=None):
    """Write an upload (file object or base64 data-URL string) to GCS.

    Args:
        cont: an uploaded file object when cType == "file", otherwise a
            data-URL string "<header>,<base64 payload>".
        iden: identifier prefixed to the stored object name.
        cType: "file" for an upload object, anything else for a data URL.
        fileName: object name to use; ignored when cType == "file", where
            the upload's own filename is used instead.

    Returns:
        (full GCS path, sanitized original filename) tuple.

    Raises:
        Exception: (500, message) wrapper on any storage failure.
    """
    # Retries smooth over transient urlfetch/GCS issues such as timeouts.
    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                              max_delay=5.0,
                                              backoff_factor=2,
                                              max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    # Resolve the target bucket (env override, else the app default bucket).
    bucket_name = os.environ.get('BUCKET_NAME',
                                 app_identity.get_default_gcs_bucket_name())
    bucket = '/' + bucket_name
    # Timestamp is folded into the object name for uniqueness.
    now = datetime.datetime.now()
    # Content type: take it from the upload, or guess it from the data URL.
    if cType == "file":
        content_t = cont.mimetype
        fileName = cont.filename
    else:
        # BUG FIX: guess_type returns a (type, encoding) tuple; only the
        # type string is a valid content_type (the original passed the
        # whole tuple).
        content_t = mimetypes.guess_type(cont)[0]
        # Keep only the base64 payload of the data URL, then decode it.
        cont = cont.split(',')[1]
        cont = base64.b64decode(cont)
    # Build a safe, unique object name.
    fileName = secure_filename(fileName)
    fileName1 = iden + now.isoformat() + fileName
    filename = bucket + '/' + fileName1
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    try:
        gcs_file = gcs.open(filename, 'w',
                            content_type=content_t,
                            options={'x-goog-meta-filename': fileName},
                            retry_params=write_retry_params)
        if cType == "file":
            # File uploads expose their bytes via a stream.
            gcs_file.write(cont.stream.read())
        else:
            # Data-URL content was already decoded above.
            gcs_file.write(cont)
        gcs_file.close()
    except Exception as e:
        logging.exception(e)
        raise Exception(500, str(e))
    return (filename, fileName)
def setUp(self):
    """Setup for Cloudstorage testing.

    Activates the testbed stubs, caps bucket listings at two results,
    disables default retries, and writes a seed file for compose tests.
    """
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_app_identity_stub()
    self.testbed.init_blobstore_stub()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_urlfetch_stub()
    # Force small listing pages; the old value is kept for restoration
    # (presumably in tearDown — not visible here).
    self._old_max_keys = common._MAX_GET_BUCKET_RESULT
    common._MAX_GET_BUCKET_RESULT = 2
    self.start_time = time.time()
    # No retries: keeps storage behaviour deterministic in tests.
    cloudstorage.set_default_retry_params(None)
    # Seed content the compose tests read back.
    with cloudstorage.open(TESTFILE, 'w') as gcs:
        gcs.write(DEFAULT_COMPOSE_CONTENT)
def __init__(self, *args, **kwargs):
    """Construct the handler and install hardened GCS retry defaults.

    webapp framework invokes initialize after __init__.
    webapp2 framework invokes initialize within __init__.
    Python27 runtime swap webapp with webapp2 underneath us.
    Since initialize will conditionally change this field,
    it needs to be set before calling super's __init__.
    """
    self._preprocess_success = False
    super(TaskQueueHandler, self).__init__(*args, **kwargs)
    # cloudstorage may be unavailable; only configure retries when present.
    if cloudstorage:
        cloudstorage.set_default_retry_params(
            cloudstorage.RetryParams(
                min_retries=5,
                max_retries=10,
                urlfetch_timeout=parameters._GCS_URLFETCH_TIMEOUT_SEC,
                save_access_token=True,
                _user_agent=self._DEFAULT_USER_AGENT))
def testRetryParams(self):
    """Retry params are snapshotted per handle, not read live.

    A file handle must keep the retry params in effect when it was
    opened (or the per-call override) even if the default object is
    mutated or replaced afterwards.
    """
    retry_params = cloudstorage.RetryParams(max_retries=0)
    cloudstorage.set_default_retry_params(retry_params)

    # Mutating the installed object must not leak into a new handle.
    retry_params.max_retries = 1000
    with cloudstorage.open(TESTFILE, 'w') as f:
        self.assertEqual(0, f._api.retry_params.max_retries)

    # Re-installing defaults while a handle is open must not affect it.
    with cloudstorage.open(TESTFILE, 'w') as f:
        cloudstorage.set_default_retry_params(retry_params)
        self.assertEqual(0, f._api.retry_params.max_retries)

    # A per-call override is used as-is, replacing the defaults.
    per_call_retry_params = cloudstorage.RetryParams()
    with cloudstorage.open(TESTFILE, 'w',
                           retry_params=per_call_retry_params) as f:
        self.assertEqual(per_call_retry_params, f._api.retry_params)
def CreateFile(self, nombre, datos): my_default_retry_params = gcs.RetryParams(initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15) gcs.set_default_retry_params(my_default_retry_params) bucket_name = os.environ.get( 'BUCKET_NAME', app_identity.get_default_gcs_bucket_name()) #bucket_name = os.environ.get('BUCKET_NAME', 'prueba') #print bucket_name #bucket_name = 'prueba' bucket = '/' + bucket_name filename = bucket + '/' + nombre print 'filename: ' + filename #https://cloud.google.com/appengine/docs/python/googlecloudstorageclient/functions write_retry_params = gcs.RetryParams(backoff_factor=1.1) gcs_file = gcs.open(filename, 'w', content_type='image/jpeg', options={ 'x-goog-meta-foo': 'foo', 'x-goog-meta-bar': 'bar', 'x-goog-acl': 'public-read' }, retry_params=write_retry_params) gcs_file.write(datos) gcs_file.close() blobstore_filename = '/gs' + filename key = blobstore.create_gs_key(blobstore_filename) #Si se encuentra en el servidor de Google if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'): return 'http://storage.googleapis.com' + filename #Si está en entorno de desarrollo local: else: return get_serving_url(key)
def load_from_gcs_bucket(filename, bucket=None):
    """Return an open GCS read handle for *filename*, or None on failure.

    Args:
        filename: object name within the bucket.
        bucket: bucket name; defaults to the BUCKET_NAME env var or the
            app's default bucket.

    Returns:
        A readable GCS file object, or None if the bucket could not be
        resolved or the object was not found.
    """
    # FIX: renamed local from "file" — it shadowed the builtin.
    gcs_file = None
    # Retry can help overcome transient urlfetch or GCS issues, such as timeouts.
    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                              max_delay=5.0,
                                              backoff_factor=2,
                                              max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    if bucket is None:
        # Default bucket name
        bucket = os.environ.get('BUCKET_NAME',
                                app_identity.get_default_gcs_bucket_name())
    try:
        if bucket is not None:
            gcs_file = gcs.open('/' + bucket + '/' + filename, 'r')
        else:
            logging.error('Bucket does not exist. Consider adding default bucket through GAE console')
    except gcs.NotFoundError:
        logging.error(filename + ' not found in default bucket')
    return gcs_file
import cloudstorage from google.appengine.api import app_identity import webapp2 import instagram import taxonomy BUCKET_NAME = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name()) JSON_OUT = '/%s/birds.json' % BUCKET_NAME cloudstorage.set_default_retry_params( cloudstorage.RetryParams(initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15)) class MainPage(webapp2.RequestHandler): def get(self): self.response.headers['Content-Type'] = 'text/plain' self.response.write(BUCKET_NAME) instagram_posts = instagram.getPostsByEnglishName() taxonomy_dict = taxonomy.getHierarchicalDict( english_name_filter=instagram_posts) with cloudstorage.open(JSON_OUT, 'w', content_type='application/json',
from google.appengine.api import app_identity __author__ = 'fernando' CACHE_TIMEOUT = 86400*7 TEXTCHARS = ''.join(map(chr, [7,8,9,10,12,13,27] + range(0x20, 0x100))) logging = _logging.getLogger("matrufsc2_cache") logging.setLevel(_logging.WARNING) gcs.set_default_retry_params( gcs.RetryParams( initial_delay=0.2, max_delay=5.0, min_retries=10, max_retries=60, backoff_factor=2, max_retry_period=30, urlfetch_timeout=60 ) ) class CacheItem(object): __slots__ = ["value", "expire_on"] class LRUItem(object): __slots__ = ["value", "key", "updated_on", "accessed_on"] def __repr__(self):
""" データの取得 """ res = self.cache.get() if not bool(res): logging.debug(self.cache.name + "MISS") self.cache.add(self.edit(self.read())) res = self.cache.get() return res # GCSタイムアウト設定 RETRY_PARAMS = storage.RetryParams(initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15) storage.set_default_retry_params(RETRY_PARAMS) class Storage: """ Google Cloud Storageの汎用モデル """ @staticmethod def get_bucket_name(): """ バケット名を返す """ return os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name()) @staticmethod def write_file(filename, content, content_type): """ GCSにファイル保存
# [START imports] import os import cloudstorage as gcs import webapp2 # from google.appengine.api import images from PIL import Image import io from settings import SRC_BUCKET, DST_BUCKET, BUCKET_NAME, ALLOW_EXT, ROWS, COLS # [END imports] # [START retries] gcs.set_default_retry_params( gcs.RetryParams(initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15)) # [END retries] class MainPage(webapp2.RequestHandler): """Main page for GCS demo application.""" page_size = 100 def get(self): try: # [header of response] self.response.headers['Content-Type'] = 'text/plain' self.response.write( '\t\t\tChop Cutting Google AppEngine Application\n')
# -*- coding: utf-8 -*- ''' Created on Jul 18, 2013 @author: Edis Sehalic ([email protected]) ''' import sys import os sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'lib')) import cloudstorage ''' appstats_MAX_STACK = 50 def webapp_add_wsgi_middleware(app): from google.appengine.ext.appstats import recording app = recording.appstats_wsgi_middleware(app) return app ''' cloudstorage.set_default_retry_params( cloudstorage.RetryParams(initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retries=5, max_retry_period=60, urlfetch_timeout=30))
import cgi import cloudstorage import copy from google.appengine.ext import blobstore from google.appengine.api import images, urlfetch from google.appengine.datastore.datastore_query import Cursor from app import orm, mem, settings from app.util import * # @see https://developers.google.com/appengine/docs/python/googlecloudstorageclient/retryparams_class default_retry_params = cloudstorage.RetryParams(initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retries=5, max_retry_period=60, urlfetch_timeout=30) cloudstorage.set_default_retry_params(default_retry_params) ########################################## ########## Extra system models! ########## ########################################## class Role(orm.BaseExpando): _kind = 66 # feature proposition (though it should create overhead due to the required drilldown process!) # parent_record = orm.SuperKeyProperty('1', kind='Role', indexed=False) # complete_name = orm.SuperTextProperty('2') name = orm.SuperStringProperty('1', required=True)
import jinja2
import os
import cloudstorage as gcs
import logging
import webapp2
from google.appengine.ext import ndb

# Target GCS bucket; the placeholder must be replaced with a real bucket.
BUCKET_NAME = 'your_bucket_name'

# Retry policy for transient urlfetch/GCS failures.
MY_DEFAULT_RETRY_PARAMS = gcs.RetryParams(initial_delay=0.2,
                                          max_delay=5.0,
                                          backoff_factor=2,
                                          max_retry_period=15)
gcs.set_default_retry_params(MY_DEFAULT_RETRY_PARAMS)


def guestbook_key(guestbook_name='default_guestbook'):
    """Return the ancestor ndb key for the named guestbook."""
    return ndb.Key('Guestbook', guestbook_name)


# Shared Jinja2 environment rooted at this module's directory.
jinja_environment = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
    extensions=['jinja2.ext.autoescape'],
    autoescape=True)


class Greeting(ndb.Model):
    """A single guestbook entry."""
    # Entry text.
    content = ndb.StringProperty()
    # Creation timestamp, set automatically on first put.
    date = ndb.DateTimeProperty(auto_now_add=True)
import os import re import sys import webapp2 import uuid from perf_insights import trace_info import cloudstorage as gcs default_retry_params = gcs.RetryParams(initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15) gcs.set_default_retry_params(default_retry_params) class UploadPage(webapp2.RequestHandler): def get(self): self.response.out.write(""" <html><body> <head><title>Performance Insights - Trace Uploader</title></head> <form action="/upload" enctype="multipart/form-data" method="post"> <div><input type="file" name="trace"/></div> <div><input type="submit" value="Upload"></div> </form><hr> </body></html>""") def post(self):
import webapp2 from google.appengine.api import app_identity from google.appengine.ext import blobstore from google.appengine.ext.webapp import blobstore_handlers from apiclient.discovery import build from apiclient.errors import HttpError from oauth2client.client import AccessTokenCredentials my_default_retry_params = gcs.RetryParams(initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15) gcs.set_default_retry_params(my_default_retry_params) bucket = "/" + os.environ.get("BUCKET_NAME", app_identity.get_default_gcs_bucket_name()) class UploadHandler(webapp2.RequestHandler): _metainfo = None _content_type = None _content = None _token = None _service = None def dispatch(self): self._checkauth()
# [START sample] """A sample app that uses GCS client to operate on bucket and file.""" # [START imports] import os import cloudstorage from google.appengine.api import app_identity import webapp2 # [END imports] # [START retries] cloudstorage.set_default_retry_params( cloudstorage.RetryParams( initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15 )) # [END retries] class MainPage(webapp2.RequestHandler): """Main page for GCS demo application.""" # [START get_default_bucket] def get(self): bucket_name = os.environ.get( 'BUCKET_NAME', app_identity.get_default_gcs_bucket_name()) self.response.headers['Content-Type'] = 'text/plain' self.response.write( 'Demo GCS Application running from Version: {}\n'.format(