def save_file(file_hash, file_content):
    """Store *file_content* in the default GCS bucket under ``ics/<file_hash>``.

    Returns the blobstore key addressing the stored object.
    """
    gcs_path = '/%s/%s/%s' % (
        app_identity.get_default_gcs_bucket_name(), 'ics', file_hash)
    with gcs.open(gcs_path, 'w') as gcs_file:
        gcs_file.write(file_content)
    # Blobstore addresses GCS objects through the '/gs' prefix.
    return blobstore.create_gs_key('/gs' + gcs_path)
def index(_logged_user):
    """Example of file upload using Google Cloud Storage.

    Renders the upload page with a fresh upload URL plus the logged user's
    stored blob files, each annotated with delete/download links.
    """
    bucket = get_default_gcs_bucket_name()
    logging.info(bucket)
    upload_url = blobstore.create_upload_url(
        router.to_path(upload), gs_bucket_name=bucket)

    list_cmd = blob_facade.list_blob_files_cmd(_logged_user)
    form = blob_facade.blob_file_form()
    delete_base = router.to_path(delete)
    download_base = router.to_path(download)

    def to_view(blob):
        # 64: presumably a truncation limit applied by the form — confirm.
        view = form.fill_with_model(blob, 64)
        view['delete_path'] = router.to_path(delete_base, view['id'])
        view['download_path'] = router.to_path(
            download_base, blob_key=blob.blob_key, filename=view['filename'])
        return view

    blob_files = [to_view(b) for b in list_cmd()]
    return TemplateResponse({'upload_url': upload_url,
                             'blob_files': blob_files},
                            'updown/home.html')
def index(_handler):
    """Render the upload form pointing at the default GCS bucket."""
    destination = to_path(upload)
    upload_url = blobstore.create_upload_url(
        destination, gs_bucket_name=get_default_gcs_bucket_name())
    return TemplateResponse({'upload_url': upload_url})
def bucket(self):
    """Return the application's default Google Cloud Storage bucket.

    API Reference:
    https://googlecloudplatform.github.io/google-cloud-python/latest/storage/client.html#google.cloud.storage.client.Client.bucket
    """
    default_name = app_identity.get_default_gcs_bucket_name()
    return self.storage.bucket(default_name)
def index():
    """Render the admin page: categories with product counts, upload URL,
    stored files, and the Angular REST endpoint paths.
    """
    categorias = Categoria.query_ordenada_por_nome().fetch()

    # Annotate each category with its product count.  The entity is already
    # in hand, so there is no need to re-fetch it via Categoria.get_by_id();
    # count() avoids materializing every product just to take len() of it.
    for cat in categorias:
        cat.QtdProd = Produto.query_por_categoria_ordenada_por_nome(cat).count()

    # (The original loop over Produto entities computed only unused locals
    # and produtos was never rendered, so that dead code was removed.)

    # Angular REST endpoints — the JSON handlers live in a separate module.
    salvar_path = router.to_path(salvarCategoria)
    listar_path = router.to_path(listarCategoria)
    deletar_path = router.to_path(deletarCategoria)
    editar_path = router.to_path(editarCategoria)

    upload_path = router.to_path(updown.upload)
    bucket = get_default_gcs_bucket_name()
    url = blobstore.create_upload_url(upload_path, gs_bucket_name=bucket)

    arquivos = blob_facade.list_blob_files_cmd()()
    download_path = router.to_path(updown.download)
    for arq in arquivos:
        arq.download_path = router.to_path(download_path, arq.key.id(),
                                           arq.filename)

    contexto = {'categoria_lista': categorias,
                'upload_path': url,
                'arquivos': arquivos,
                'rest_salvar_path': salvar_path,
                'rest_list_path': listar_path,
                'rest_delete_path': deletar_path,
                'rest_edit_path': editar_path}
    return TemplateResponse(contexto, template_path='/andris/admin.html')
def get(self, *args, **kwargs):
    """Serve a plain-text demo page and read a fixed file from GCS."""
    # Prefer an explicitly configured bucket; fall back to the app default.
    bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
    self.response.headers['Content-Type'] = 'text/plain'
    self.response.write('Demo GCS Application running from Version: ' + os.environ['CURRENT_VERSION_ID'] + '\n')
    self.response.write('Using bucket name: ' + bucket_name + '\n\n')
    bucket = '/' + bucket_name
    # filename = bucket + '/old_data/aid_table.txt'
    # NOTE(review): hard-coded path ignores `bucket` computed above; the
    # commented line suggests the bucket-relative path was intended — confirm.
    filename = '/sa-tools.appspot.com/old_data/aid_table.txt'
    self.read_file(filename)
    # NOTE(review): create_gs_key requires a '/gs/...' filename argument;
    # calling it with no arguments will raise TypeError — confirm intent.
    images.blobstore.create_gs_key()
def index():
    """Render the empty game form with an upload URL and permission choices."""
    upload_url = blobstore.create_upload_url(
        router.to_path(upload), gs_bucket_name=get_default_gcs_bucket_name())
    context = {
        'jogo': Jogo(),
        'groups': [],
        'choice_groups': ALL_PERMISSIONS_LIST,
        'upload_url': upload_url,
        'nav_active': 'jogos',
    }
    return TemplateResponse(context, template_path='/jogos/jogo_form.html')
def index(_handler):
    """Example of file upload using Google Cloud Storage."""
    bucket = get_default_gcs_bucket_name()
    logging.info(bucket)
    upload_url = blobstore.create_upload_url(
        router.to_path(upload), gs_bucket_name=bucket)
    return TemplateResponse({'upload_url': upload_url}, 'updown/home.html')
def index(_handler):
    """List uploaded report files with per-file delete/download links."""
    archives = blob_facade.list_blob_files_cmd()()
    download_base = router.to_path(download)
    delete_base = router.to_path(delete)
    for archive in archives:
        file_id = archive.key.id()
        archive.delete_path = router.to_path(delete_base, file_id,
                                             archive.filename)
        archive.download_path = router.to_path(download_base, file_id,
                                               archive.filename)
    save_url = blobstore.create_upload_url(
        router.to_path(upload), gs_bucket_name=get_default_gcs_bucket_name())
    return TemplateResponse({'save_path': save_url, 'archives': archives},
                            '/admin/reports-management.html')
def start_template(filename, job_name, params=None, retries=5):
    """Starts a dataflow template.

    Args:
        filename (str): The gcs filename for the template.
        job_name (str): The job name to use.
        params (dict): Job parameters. Defaults to an empty dict.
        retries (int): Remaining retries. Set this to 0 to never retry.

    Returns:
        dict: The job response.
    """
    # A mutable default argument ({}) is shared across all calls; use the
    # None-sentinel idiom instead.
    if params is None:
        params = {}
    global gcs_dataflow_path
    if gcs_dataflow_path is None:
        gcs_dataflow_path = 'gs://{}/dataflow'.format(
            app_identity.get_default_gcs_bucket_name())
    body = {
        'jobName': job_name,
        'environment': {
            'tempLocation': '{}/temp_jobs'.format(gcs_dataflow_path)
        },
        'parameters': params
    }
    request = get_client().projects().locations().templates().launch(
        projectId=app_identity.get_application_id(),
        gcsPath='{}/templates/{}'.format(gcs_dataflow_path, filename),
        location=os.environ.get('DATAFLOW_REGION') or 'us-central1',
        body=body)
    logging.info('Starting template...')
    try:
        return request.execute().get('job', {})
    except HttpError as ex:
        if retries > 0:
            # logging.warn is a deprecated alias of logging.warning.
            logging.warning(
                'Retrying failed request ({} retries remaining)'.format(
                    retries),
                exc_info=True)
            return start_template(filename, job_name, params, retries - 1)
        else:
            raise ex
def index(_handler):
    """Show stored report files together with delete/download actions."""
    list_files = blob_facade.list_blob_files_cmd()
    archives = list_files()
    delete_base = router.to_path(delete)
    download_base = router.to_path(download)
    for item in archives:
        item_id = item.key.id()
        item.delete_path = router.to_path(delete_base, item_id, item.filename)
        item.download_path = router.to_path(download_base, item_id,
                                            item.filename)
    bucket = get_default_gcs_bucket_name()
    upload_url = blobstore.create_upload_url(router.to_path(upload),
                                             gs_bucket_name=bucket)
    ctx = {'save_path': upload_url, 'archives': archives}
    return TemplateResponse(ctx, '/admin/reports-management.html')
def index():
    """Render the upload page together with the stored blob files."""
    bucket = get_default_gcs_bucket_name()
    logging.info(bucket)
    upload_url = blobstore.create_upload_url(router.to_path(upload),
                                             gs_bucket_name=bucket)
    list_cmd = blob_facade.list_blob_files_cmd()
    form = blob_facade.blob_file_form()
    delete_base = router.to_path(delete)
    download_base = router.to_path(download)

    def as_dict(blob):
        item = form.fill_with_model(blob, 64)
        item['delete_path'] = router.to_path(delete_base, item['id'])
        item['download_path'] = router.to_path(
            download_base, blob.blob_key, item['filename'].encode('utf8'))
        return item

    blob_files = [as_dict(b) for b in list_cmd()]
    return TemplateResponse({'upload_url': upload_url,
                             'blob_files': blob_files},
                            'updown/home.html')
def index(_logged_user):
    """List the logged user's games with edit/delete links and action URLs."""
    author_relations = Autor.query(Autor.origin == _logged_user.key).fetch()
    game_keys = [rel.destination for rel in author_relations]
    form = GameFormTable()
    jogo_lista = [form.fill_with_model(game)
                  for game in ndb.get_multi(game_keys)]

    edit_base = router.to_path(editar_form)
    delete_base = router.to_path(deletar_form)
    for game in jogo_lista:
        game['edit_path'] = '%s/%s' % (edit_base, game['id'])
        game['delete_path'] = '%s/%s' % (delete_base, game['id'])

    bucket = get_default_gcs_bucket_name()
    upload_url = blobstore.create_upload_url(router.to_path(upload),
                                             gs_bucket_name=bucket)
    contexto = {'jogo_lista': jogo_lista,
                'upload_url': upload_url,
                'pergunta_url': router.to_path(pergunta),
                'analise_url': router.to_path(analise)}
    return TemplateResponse(contexto)
def api_gists(): """Gists Endpoint""" if request.method == "GET": gists = GistModel.query().fetch(20) resp = Response(json.dumps(gists, cls=NDBModelEncoder), mimetype="application/json", status=200) return resp elif request.method == "POST": gist = GistModel() gist.added_by = request.form["nick"] gist.gist = request.form["gist"] # add image if available image = request.files["img"] if image is not None: filename = secure_filename(image.filename) gcs_filename = "/" + get_default_gcs_bucket_name() + "/" + filename blob_key = CreateFile(gcs_filename, request.files["img"]) gist.image = BlobKey(blob_key) key = gist.put() resp = Response(json.dumps(gist, cls=NDBModelEncoder), mimetype="application/json", status=201) return resp else: raise RuntimeError("Unimplemented HTTP method")
def index(_logged_user):
    """Render the upload page along with the user's stored blob files."""
    bucket = get_default_gcs_bucket_name()
    upload_url = blobstore.create_upload_url(router.to_path(upload),
                                             gs_bucket_name=bucket)
    files = blob_facade.list_blob_files_cmd(_logged_user)()
    delete_base = router.to_path(delete)
    download_base = router.to_path(download)
    form = blob_facade.blob_file_form()

    def localize(blob_file):
        entry = form.fill_with_model(blob_file, 64)
        entry['delete_path'] = router.to_path(delete_base, entry['id'])
        entry['download_path'] = router.to_path(download_base,
                                                blob_file.blob_key,
                                                entry['filename'])
        return entry

    localized = [localize(f) for f in files]
    return TemplateResponse({'upload_url': upload_url,
                             'blob_files': localized},
                            'updown/home.html')
def edit(_handler, **jogos_properties):
    """Save a Jogo entity from form properties, handling an optional upload."""
    # A new avatar was uploaded: point 'avatar' at the blob's download URL.
    if jogos_properties.get('files'):
        blob_infos = _handler.get_uploads("files[]")
        blob_key = blob_infos[0].key()
        avatar = to_path(download, blob_key)
        jogos_properties['avatar'] = avatar
    jogos_properties.pop("files", None)
    # NOTE(review): obj_id is popped but never used below — confirm whether
    # the commented-out update command was meant to consume it.
    obj_id = jogos_properties.pop("key_id", None)
    # Normalize 'groups' to a list (single selections arrive as a scalar).
    # NOTE(review): when 'groups' is absent this produces [None] — confirm.
    if not isinstance(jogos_properties.get('groups'), list):
        jogos_properties['groups'] = [jogos_properties.get('groups')]
    # cmd = jogo_facade.update_jogo_cmd(obj_id, **jogos_properties)
    try:
        cmd = Jogo(**jogos_properties)
        cmd.put()
    except CommandExecutionException:
        # On failure, re-render the form with a fresh upload URL.
        # NOTE(review): Jogo(...).put() is unlikely to raise
        # CommandExecutionException — leftover from the commented command?
        success_url = router.to_path(edit)
        bucket = get_default_gcs_bucket_name()
        url = blobstore.create_upload_url(success_url, gs_bucket_name=bucket)
        context = {'errors': {}, 'upload_url': url, 'jogo': jogos_properties}
        return TemplateResponse(context, template_path='/jogos/jogo.html')
    # Brief pause before redirecting — presumably an eventual-consistency
    # workaround so the write shows up on the listing page; confirm.
    sleep(0.5)
    return RedirectResponse(router.to_path(jogos))
"'self'", "data:", "s.ytimg.com", "*.google-analytics.com", "*.googleusercontent.com", "*.gstatic.com", "p.typekit.net", "https://p.typekit.net", ) CSP_CONNECT_SRC = ( "'self'", "plus.google.com", "www.google-analytics.com", ) BUCKET_KEY = get_default_gcs_bucket_name() DEFAULT_FILE_STORAGE = 'google.appengine.api.blobstore.blobstore_stub.BlobStorage' DJANGAE_RUNSERVER_IGNORED_FILES_REGEXES = [ '^.+$(?<!\.py)(?<!\.yaml)(?<!\.html)', ] # Note that these should match a directory name, not directory path: DJANGAE_RUNSERVER_IGNORED_DIR_REGEXES = [ r"^google_appengine$", r"^bower_components$", r"^node_modules$", r"^sitepackages$", ]
def bucket(self):
    """The application's default GCS bucket, via the storage client."""
    return self.storage.bucket(
        app_identity.get_default_gcs_bucket_name())
def form():
    """Render the upload form pointing at a fresh blobstore upload URL."""
    save_url = blobstore.create_upload_url(
        to_path(upload), gs_bucket_name=get_default_gcs_bucket_name())
    return TemplateResponse({'salvar_path': save_url}, 'upload_form.html')
# Content-Security-Policy: allowed image sources.
CSP_IMG_SRC = (
    "'self'",
    "data:",
    "s.ytimg.com",
    "*.google-analytics.com",
    "*.googleusercontent.com",
    "*.gstatic.com",
    "p.typekit.net",
)
# Content-Security-Policy: allowed XHR/connect sources.
CSP_CONNECT_SRC = (
    "'self'",
    "plus.google.com",
    "www.google-analytics.com",
)
# Default GCS bucket name for this app.
BUCKET_KEY = get_default_gcs_bucket_name()
DEFAULT_FILE_STORAGE = 'google.appengine.api.blobstore.blobstore_stub.BlobStorage'
# Ignore all files except .py/.yaml/.html when watching for reloads.
DJANGAE_RUNSERVER_IGNORED_FILES_REGEXES = [
    '^.+$(?<!\.py)(?<!\.yaml)(?<!\.html)',
]
# Note that these should match a directory name, not directory path:
DJANGAE_RUNSERVER_IGNORED_DIR_REGEXES = [
    r"^google_appengine$",
    r"^bower_components$",
    r"^node_modules$",
    r"^sitepackages$",
]
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from google.appengine.api.app_identity import app_identity
import xmltodict
from google.appengine.api import urlfetch
from google.appengine.ext import blobstore

# Default GCS bucket for this application, resolved once at import time.
bucket_identity = app_identity.get_default_gcs_bucket_name()


def index(_write_tmpl):
    """Render the static info page."""
    _write_tmpl('templates/info.html')


def ler_arquivo_dict(filename):
    """Read *filename* from the default GCS bucket and parse it as XML.

    Returns the parsed document as a dict (namespaces processed).
    """
    blobstore_filename = '/gs/{0}/{1}'.format(bucket_identity, filename)
    blob_key = blobstore.create_gs_key(blobstore_filename)
    blob_reader = blobstore.BlobReader(blob_key)
    conteudo = blob_reader.read()
    # Renamed local from `dict` — the original shadowed the builtin.
    parsed = xmltodict.parse(conteudo, process_namespaces=True)
    return parsed


def listar_cidades(_json):
    """Emit the list of host-city names as protected JSON."""
    dictCidades = ler_arquivo_dict('cidadesede.xml')
    listaCidades = []  # initialize the list of cities
    # Look up each city entry in the parsed document.
    for cidade in dictCidades['collection'][
            'http://www.portaltransparencia.gov.br/copa2014:cidadeSede']:
        # Append the value (city name) of the 'descricao' key.
        listaCidades.append(cidade['descricao'])
    # Serialize the city list as protected JSON.
    _json(listaCidades, '')
import StringIO
import logging
from PIL import Image
from datetime import datetime
from google.appengine.api import urlfetch, images
from google.appengine.api.app_identity import app_identity
from google.appengine.api.blobstore import blobstore
import cloudstorage
from methods.mapping import timestamp

# Longest allowed image edge in pixels; a float so the ratio division in
# _resize stays floating-point under Python 2.
MAX_SIZE = 960.0
# Default GCS bucket, resolved once at import time.
_BUCKET = app_identity.get_default_gcs_bucket_name()


def _resize(image, size):
    """Downscale *image* so its longest edge is at most *size*, keeping aspect."""
    width, height = image.size
    logging.info("image size is %sx%s", width, height)
    if width > size or height > size:
        ratio = min(size / width, size / height)
        new_size = int(width * ratio), int(height * ratio)
        logging.info("resizing to %sx%s", *new_size)
        image = image.resize(new_size, Image.ANTIALIAS)
    return image


def _save(image, filename):
    """Write *image* as PNG to GCS at *filename* (best effort)."""
    image_file = cloudstorage.open(filename, "w", 'image/png')
    try:
        image.save(image_file, 'PNG')
    except:
        # NOTE(review): bare except swallows all errors, and image_file is
        # never closed in this visible span — a cloudstorage file is only
        # committed on close(). Confirm whether the enclosing code closes it
        # or whether this snippet is truncated.
        logging.warning('can not save PNG')
def _get_default_bucket():
    """Return the app's default GCS bucket name, caching it after first use."""
    global _default_bucket
    if not _default_bucket:
        _default_bucket = app_identity.get_default_gcs_bucket_name()
    return _default_bucket
def create_upload_url(application_name):
    """Create a blobstore upload URL that stores uploads under
    ``<default-bucket>/<application_name>`` and completes at
    ``/upload/finish/<application_name>``.
    """
    finish_path = "/upload/finish/%s" % application_name
    bucket_path = "%s/%s" % (app_identity.get_default_gcs_bucket_name(),
                             application_name)
    return blobstore.create_upload_url(finish_path,
                                       gs_bucket_name=bucket_path)