Example No. 1
    def testStoreObjectInGCS_NotResizableMimeType(self):
        guid = 'aaaaa'
        project_id = 100
        object_id = '/%s/attachments/%s' % (project_id, guid)
        bucket_name = 'test_bucket'
        object_path = '/' + bucket_name + object_id
        mime_type = 'not_resizable_mime_type'
        content = 'content'

        self.mox.StubOutWithMock(app_identity, 'get_default_gcs_bucket_name')
        app_identity.get_default_gcs_bucket_name().AndReturn(bucket_name)

        self.mox.StubOutWithMock(uuid, 'uuid4')
        uuid.uuid4().AndReturn(guid)

        self.mox.StubOutWithMock(cloudstorage, 'open')
        cloudstorage.open(object_path, 'w',
                          mime_type).AndReturn(fake.FakeFile())

        self.mox.ReplayAll()

        ret_id = gcs_helpers.StoreObjectInGCS(content, mime_type, project_id,
                                              gcs_helpers.DEFAULT_THUMB_WIDTH,
                                              gcs_helpers.DEFAULT_THUMB_HEIGHT)
        self.mox.VerifyAll()
        self.assertEquals(object_id, ret_id)
Example No. 2
    def testProjectWithLogo(self):
        bucket_name = 'testbucket'
        logo_gcs_id = '123'
        logo_file_name = 'logo.png'
        project_pb = project_pb2.MakeProject('testProject',
                                             logo_gcs_id=logo_gcs_id,
                                             logo_file_name=logo_file_name)

        self.mox.StubOutWithMock(app_identity, 'get_default_gcs_bucket_name')
        app_identity.get_default_gcs_bucket_name().AndReturn(bucket_name)

        self.mox.StubOutWithMock(gcs_helpers, 'SignUrl')
        gcs_helpers.SignUrl(bucket_name,
                            logo_gcs_id + '-thumbnail').AndReturn('signed/url')
        gcs_helpers.SignUrl(bucket_name, logo_gcs_id).AndReturn('signed/url')

        self.mox.ReplayAll()

        view = tracker_views.LogoView(project_pb)
        self.mox.VerifyAll()
        self.assertEquals('logo.png', view.filename)
        self.assertEquals('image/png', view.mimetype)
        self.assertEquals('signed/url', view.thumbnail_url)
        self.assertEquals(
            'signed/url&response-content-displacement=attachment%3B'
            '+filename%3Dlogo.png', view.viewurl)
Example No. 3
    def testDeleteObjectFromGCS(self):
        object_id = 'aaaaa'
        bucket_name = 'test_bucket'
        object_path = '/' + bucket_name + object_id

        self.mox.StubOutWithMock(app_identity, 'get_default_gcs_bucket_name')
        app_identity.get_default_gcs_bucket_name().AndReturn(bucket_name)

        self.mox.StubOutWithMock(cloudstorage, 'delete')
        cloudstorage.delete(object_path)

        self.mox.ReplayAll()

        gcs_helpers.DeleteObjectFromGCS(object_id)
        self.mox.VerifyAll()
Example No. 4
    def post(self):
        self.response.headers.add_header('Access-Control-Allow-Origin', '*')
        self.response.headers['Content-Type'] = 'application/json'

        bucket_name = app_identity.get_default_gcs_bucket_name()
        uploaded_file = self.request.POST.get('uploaded_file')
        file_name = getattr(uploaded_file, 'filename', None)
        file_content = getattr(uploaded_file, 'file', None)
        real_path = ''

        if file_name and file_content:
            content_t = mimetypes.guess_type(file_name)[0]
            real_path = os.path.join('/', bucket_name, file_name)

            with cloudstorage.open(real_path, 'w', content_type=content_t,
                                   options={'x-goog-acl': 'public-read'}) as f:
                f.write(file_content.read())

            key = self._get_urls_for(file_name)
            self.response.write(key)

            # Notify the admin that a new event was registered.
            sender_address = "Proyectos <*****@*****.**>"
            subject = "Evento nuevo"
            body = "Se ha registrado un nuevo evento"
            to = "Admin <*****@*****.**>"

            mail.send_mail(sender_address, to, subject, body)
Example No. 5
    def get(self):
        last_end_time_str = ""
        try:
            # get the App Engine default bucket name to store a GCS file with last end_time
            bucket_name = os.environ.get(
                'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())

            gcs_file = gcs.open('/{}/{}'.format(bucket_name,
                                                config.LAST_END_TIME_FILENAME))
            contents = gcs_file.read()
            logging.debug("GCS FILE CONTENTS: {}".format(contents))
            json_contents = json.loads(contents)
            last_end_time_str = json_contents["end_time"]
            gcs_file.close()
        except NotFoundError as nfe:
            logging.error("Missing file when reading from GCS: {}".format(nfe))
            last_end_time_str = None
        except Exception as e:
            logging.error("Received error when reading from GCS: {}".format(e))
            last_end_time_str = None

        # if there is not an existing file, create one
        try:
            if not last_end_time_str:
                self.set_last_end_time(bucket_name)
        except NotFoundError as nfe:
            logging.error("Missing file when writing to GCS: {}".format(nfe))
            last_end_time_str = None
        except Exception as e:
            logging.error("Received error when writing to GCS: {}".format(e))
            last_end_time_str = None

        self.response.headers['Content-Type'] = 'text/plain'
        self.response.status = 200
Example No. 6
    def post(self):

        cytoscapeJSON = json.loads(self.request.get('cytoscapeJSON'))
        nodeList = []
        edgeList = []
        for node in cytoscapeJSON['elements']['nodes']:
            nodeInfo = {}

            nodeInfo['data'] = node['data']
            nodeInfo['position'] = node['position']
            nodeList.append(nodeInfo)
            print nodeInfo
        for edge in cytoscapeJSON['elements']['edges']:
            edgeList.append([edge['data']['source'], edge['data']['target']])
            print edge

        graph = {'node': nodeList, 'edge': edgeList}
        gmlString = write_gml(graph)
        print gmlString
        bucket_name = os.environ.get('BUCKET_NAME',
                                     app_identity.get_default_gcs_bucket_name())
        gcs_filename = '/{0}/output.json'.format(bucket_name)
        blob_key = CreateFile(gcs_filename, gmlString)


        printStatement = '<a href="/serve/output.gml?key={0}">download file</a><br/>'.format(blob_key)
        #self.response.headers['Content-Type'] = "text/json"
        #self.response.headers["Content-Disposition"] = 'attachment; filename=output.json'
        #self.response.headers['url'] = "/serve/output.json?key={0}".format(blob_key)

        #self.response.headers.add_header('content-type', 'application/json', charset='utf-8')
        #self.response.out.write(json.dumps({'response':printStatement}))
        self.response.out.write(printStatement)
        print(blob_key)
Example No. 7
    def get(self):
        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())

        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(
            'Demo GCS Application running from Version: {}\n'.format(
                os.environ['CURRENT_VERSION_ID']))
        self.response.write('Using bucket name: {}\n\n'.format(bucket_name))
# [END get_default_bucket]

        bucket = '/' + bucket_name
        filename = bucket + '/demo-testfile'
        self.tmp_filenames_to_clean_up = []

        self.create_file(filename)
        self.response.write('\n\n')

        self.read_file(filename)
        self.response.write('\n\n')

        self.stat_file(filename)
        self.response.write('\n\n')

        self.create_files_for_list_bucket(bucket)
        self.response.write('\n\n')

        self.list_bucket(bucket)
        self.response.write('\n\n')

        self.list_bucket_directory_mode(bucket)
        self.response.write('\n\n')

        self.delete_files()
        self.response.write('\n\nThe demo ran successfully!\n')
Example No. 8
def create_job(filename):
    """Erzeuge Job zum Upload einer Datastore-Backup-Datei zu Google BigQuery."""
    bigquery_client = bigquery.Client(project=gaetkconfig.BIGQUERY_PROJECT)
    if not gaetkconfig.BIGQUERY_DATASET:
        dataset = get_default_gcs_bucket_name()
    else:
        dataset = gaetkconfig.BIGQUERY_DATASET

    tablename = filename.split('.')[-2]
    # see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load
    resource = {
        'configuration': {
            'load': {
                'destinationTable': {
                    'projectId': gaetkconfig.BIGQUERY_PROJECT,
                    'datasetId': dataset,
                    'tableId': tablename,
                },
                'maxBadRecords': 0,
                'sourceUris': ['gs:/' + filename],
                'projectionFields': [],
                'sourceFormat': 'DATASTORE_BACKUP',
                'writeDisposition': 'WRITE_TRUNCATE',
            }
        },
        'jobReference': {
            'projectId': gaetkconfig.BIGQUERY_PROJECT,
            'jobId': 'import-{}-{}'.format(tablename, int(time.time())),
        },
    }

    # POST https://www.googleapis.com/bigquery/v2/projects/projectId/jobs
    job = bigquery_client.job_from_resource(resource)
    return job
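
For reference, a minimal invocation sketch (the path is invented, not from the source). The table id comes from the second-to-last dot-separated component of the filename, and the leading '/' combines with the 'gs:/' prefix above to form a full gs:// URI:

    # Hypothetical call; yields tableId 'Customer' and
    # sourceUris ['gs://my-backup-bucket/2019-01-01.Customer.backup_info'].
    job = create_job('/my-backup-bucket/2019-01-01.Customer.backup_info')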
Example No. 9
    def get(self):

        self.response.write('<h1>grandcentralstation</h1>')

        config['www_bucket'] = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        config['thumbnail_enabled'] = 'True'

        filenames = [
            {'name':'data/a.jpg','mime':'image/jpeg'},
            {'name':'data/index.html','mime':'text/html'},
            {'name':'data/a b/index.html','mime':'text/html'},
        ]

        if os.environ.get('SERVER_SOFTWARE', '').startswith('Development'):
            self.response.write('\nDev~ detected. Adding dummy files.\n')
            # set up test bucket data
            for file in filenames:
                data = open(file['name'],'r')
                gcs_file = gcs.open( '/%s/%s' % ( config['www_bucket'] ,file['name']) ,'w',content_type=file['mime'])
                gcs_file.write(data.read())
                data.close()
                gcs_file.close()
                self.response.write('\tAdded file \'%s\'.\n' % data.name)


        self.response.write('<p>I detected a fresh deploy so I populated the datastore with a default configuration.</p>')
Example No. 10
    def get(self):
        # Get the default Cloud Storage Bucket name and create a file name for
        # the object in Cloud Storage.
        bucket = app_identity.get_default_gcs_bucket_name()

        # Cloud Storage file names are in the format /bucket/object.
        filename = '/{}/blobstore_demo'.format(bucket)

        # Create a file in Google Cloud Storage and write something to it.
        with cloudstorage.open(filename, 'w') as filehandle:
            filehandle.write('abcde\n')

        # In order to read the contents of the file using the Blobstore API,
        # you must create a blob_key from the Cloud Storage file name.
        # Blobstore expects the filename to be in the format of:
        # /gs/bucket/object
        blobstore_filename = '/gs{}'.format(filename)
        blob_key = blobstore.create_gs_key(blobstore_filename)

        # Read the file's contents using the Blobstore API.
        # The last two parameters specify the start and end index of bytes we
        # want to read.
        data = blobstore.fetch_data(blob_key, 0, 6)

        # Write the contents to the response.
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(data)

        # Delete the file from Google Cloud Storage using the blob_key.
        blobstore.delete(blob_key)
Example No. 11
 def exists_attachments_for_entity_key(entity_key):
     bucket_name = os.environ.get(
         'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
     files = gcs.listbucket("/" + bucket_name + "/" + entity_key)
     for file in files:
         return True
     return False
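
A small usage sketch (the entity key string is invented). gcs.listbucket is lazy, so the loop above only checks whether at least one object exists under the /<bucket>/<entity_key> prefix:

    # Hypothetical caller: any object stored under the entity's key prefix
    # counts as an attachment.
    if exists_attachments_for_entity_key('Customer-1234'):
        logging.info('entity has attachments')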
Example No. 12
 def __init__(self, name, content_type=None):
     self.name = name
     self.content_type = content_type
     self.file_id = str(uuid4())
     self.file = None
     # Keyword arguments applied when the file is opened in each mode.
     self.__modes = {'w': {'content_type': self.content_type}, 'r': {}}
     self.name = ("/" + app_identity.get_default_gcs_bucket_name() + "/" + self.name)
Example No. 13
	def post(self):
		schema_version = int(self.request.get('schema'))
		st = self.request.get('source_time')
		if st is None or st == "":
			st = '0001-01-01 00:00:00'
		st = datetime.datetime.strptime(st, "%Y-%m-%d %H:%M:%S")
		delta_file = self.request.POST["delta_file"]
		delta_file.file.seek(0, os.SEEK_END)
		delta_file_len = delta_file.file.tell()
		delta_file.file.seek(0, os.SEEK_SET)
		dbversion = models.DBUpdate.query(models.DBUpdate.schema_version == schema_version, models.DBUpdate.source_time == st).get() or models.DBUpdate()
		dbversion.schema_version = schema_version
		dbversion.source_time = st
		dbversion.delta_gs_object_name = "/%s/v%d/zones_%s.json" % (app_identity.get_default_gcs_bucket_name(), dbversion.schema_version, dbversion.source_time.isoformat().replace(':', '_'))

		# NOTE: intentionally not using `with`, since the docs say we should only close() if we want the file to be committed (i.e., on success only)
		options = {'content-encoding': 'gzip'} if delta_file_len > 150 else {}
		gf = gcs.open(dbversion.delta_gs_object_name, "w", content_type="application/json; charset=utf-8", options=options)
		if delta_file_len > 150:
			with gzip.GzipFile(fileobj=gf, mode="wb") as gz:
				shutil.copyfileobj(delta_file.file, gz)
		else:
			shutil.copyfileobj(delta_file.file, gf)
		gf.close()
		dbversion.put()
Example No. 14
def launch_job(job_id):
  """Launches a job given its key from MAPREDUCE_JOBS dict."""
  assert job_id in MAPREDUCE_JOBS, 'Unknown mapreduce job id %s' % job_id
  job_def = MAPREDUCE_JOBS[job_id].copy()
  # 256 helps getting things done faster but it is very easy to burn thousands
  # of $ within a few hours. Don't forget to update queue.yaml accordingly.
  job_def.setdefault('shards', 128)
  job_def.setdefault(
      'input_reader_spec', 'mapreduce.input_readers.DatastoreInputReader')
  job_def['mapper_params'] = job_def['mapper_params'].copy()
  job_def['mapper_params'].setdefault(
      'bucket_name', app_identity.get_default_gcs_bucket_name())

  if 'reducer_spec' in job_def:
    logging.info('Starting mapreduce job')
    pipeline = mapreduce_pipeline.MapreducePipeline(**job_def)
  else:
    logging.info('Starting mapper-only job')
    job_def['params'] = job_def.pop('mapper_params')
    pipeline = mapreduce_pipeline.MapPipeline(**job_def)

  pipeline.start(
      base_path=MAPREDUCE_PIPELINE_BASE_PATH, queue_name=MAPREDUCE_TASK_QUEUE)
  logging.info('Pipeline ID: %s', pipeline.pipeline_id)
  return pipeline.pipeline_id
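
A hedged launch sketch (the job id and registry entry are invented; the dict layout follows what launch_job reads above). With no 'reducer_spec', this takes the mapper-only branch:

    # Hypothetical MAPREDUCE_JOBS entry plus launch.
    MAPREDUCE_JOBS['count_users'] = {
        'job_name': 'count_users',
        'mapper_spec': 'mappers.count_user',
        'mapper_params': {'entity_kind': 'models.User'},
    }
    pipeline_id = launch_job('count_users')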
Example No. 15
def doupload(time_taken=0):
    """ Function responsible for the upload action that is performed """
    # starting the clock to calculate upload time 
    start_time = time.time()
    # getting the default application bucket based on the app identity
    bucket_name = os.environ.get('BUCKET_NAME',app_identity.get_default_gcs_bucket_name())
    # formatting the file name for creation 
    filename = '/'+bucket_name+ '/demo-testfile.csv'
    # getting the data object of the uploaded csv file
    data = request.files.get('data')
    # reading the data 
    raw = data.file.read()
    # initialising retry parameters for the request
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    # creating file on the bucket
    gcs_file = gcs.open(filename,'w',content_type='text/plain',options={'x-goog-meta-foo': 'foo','x-goog-meta-bar': 'bar'},retry_params=write_retry_params)
    # writing the data to the bucket object
    gcs_file.write(raw)
    # closing the bucket object
    gcs_file.close()
    # getting the end time of upload
    end_time = time.time()
    # calculating the total time taken 
    time_taken = end_time-start_time
    # returning the data to the download template
    return template('doupload_template',time_taken=time_taken)
Example No. 16
 def post(self):
     bucket_name = self.request.headers.get(
         'X-Keyscores-Bucket-Name',
         app_identity.get_default_gcs_bucket_name()
     )
     bucket = '/' + bucket_name
     filename = self.request.headers['X-Keyscores-Filename']
     file_data = self.request.body
     content_type = self.request.headers['Content-Type'].split(';')[0]
     if is_content_type_excel(content_type):
         # Convert each sheet in the excel file into a csv file
         # and upload the csv file's data to cloud storage
         # instead of the original excel file
         with xlrd.open_workbook(file_contents=self.request.body_file.read()) as wb:
             for sn in wb.sheet_names():
                 sh = wb.sheet_by_name(sn)
                 csv_file = StringIO()
                 c = csv.writer(csv_file)
                 first_row = sh.row_values(0)
                 for idx, col in enumerate(first_row):
                     clean_col = col.replace(" ", "_")
                     clean_col = re.sub('[()]', '', clean_col)
                     first_row[idx] = clean_col
                 c.writerow(first_row)
                 for r in range(1, sh.nrows):
                     c.writerow(sh.row_values(r))
                 # Use a per-sheet name so the base filename is not mutated on
                 # every iteration of the sheet loop.
                 sheet_filename = str(filename) + '_' + str(sn)
                 gcs_filename = os.path.join(bucket, sheet_filename + '.csv')
                 csv_file.seek(0)
                 data = csv_file.read()
                 write_gcs_file(
                     gcs_filename, data, 'text/csv'
                 )
Example No. 17
    def upload_image(self, blob, filename):
        mime_type = 'image/png'
        if filename.split('.')[-1] == 'jpg' or filename.split(
                '.')[-1] == 'jpeg':
            mime_type = 'image/jpeg'

        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        bucket = '/' + bucket_name
        filename_final = bucket + '/' + str(uuid.uuid4())

        # Create a GCS file with GCS client.
        with gcs.open(filename_final, 'w') as f:
            f.write(blob)

        # Blobstore API requires extra /gs to distinguish against blobstore files.
        blobstore_filename = '/gs' + filename_final

        # Get the file's blob key
        blob_key = blobstore.create_gs_key(blobstore_filename)
        # Store it
        self.image = blob_key
        self.mime_type = mime_type
        self.url = get_serving_url(blob_key)
Example No. 18
  def post(self):
    trace_uuid = str(uuid.uuid4())
    if 'GCS_BUCKET_NAME' not in os.environ:
      bucket_name = app_identity.get_default_gcs_bucket_name()
    else:
      bucket_name = os.environ['GCS_BUCKET_NAME']
    gcs_path = ('/' + bucket_name + '/' + trace_uuid + '.gz')
    gcs_file = gcs.open(gcs_path,
                        'w',
                        content_type='application/octet-stream',
                        options={},
                        retry_params=default_retry_params)
    gcs_file.write(self.request.get('trace'))
    gcs_file.close()

    trace_object = trace_info.TraceInfo(id=trace_uuid)
    trace_object.prod = self.request.get('prod')
    trace_object.network_type = self.request.get('network_type')
    trace_object.remote_addr = os.environ["REMOTE_ADDR"]
    tags_string = self.request.get('tags')
    if re.match('^[a-zA-Z0-9,]+$', tags_string): # ignore non-alphanumeric tags
      trace_object.tags = tags_string.split(',')
    trace_object.user_agent = self.request.headers.get('User-Agent')
    trace_object.ver = self.request.get('product_version')
    trace_object.put()

    self.response.write(trace_uuid)
Example No. 19
def make_blob_public(csv, folder, name=None):
    bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    filename = '/' + bucket_name + '/00_Reports/' + folder + '/' + name + '.csv'
    gcs_file = gcs.open(filename, 'w', content_type='text/csv', retry_params=write_retry_params)
    gcs_file.write(csv)
    gcs_file.close()
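
A caller sketch (folder and name are invented). Note that despite the None default, `name` must be supplied, since it is concatenated into the object path:

    # Hypothetical call: csv_text holds already-rendered CSV content.
    csv_text = 'id,total\n1,9.99\n'
    make_blob_public(csv_text, 'daily', name='2016-01-01')
    # -> writes /<default-bucket>/00_Reports/daily/2016-01-01.csv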
Example No. 20
 def get_from_gcs(alerts_type, filename):
     logging.info('Reading alerts from GCS')
     with contextlib.closing(
             gcs.open("/" + app_identity.get_default_gcs_bucket_name() +
                      "/history/" + alerts_type + "/" +
                      filename)) as gcs_file:
         return gcs_file.read()
Example No. 21
    def get(self):
        current_time = datetime.datetime.now()
        bucketname = os.environ.get('BUCKET_NAME',
                                    app_identity.get_default_gcs_bucket_name())
        createFile('/' + bucketname + '/jcssi.txt')
        message = '<p>The time is: %s</p>' % current_time
        user = users.get_current_user()
        #username = '******'
        #print user.email()
        login_url = users.create_login_url(self.request.path)
        logout_url = users.create_logout_url(self.request.path)
        #login_url = 'index.html'
        #logout_url = 'index.html'
        #self.response.out.write(message)

        template = template_env.get_template('index.html')
        context = {
            'mode': 1,
            'current_time': current_time,
            'user': user,
            'login_url': login_url,
            'logout_url': logout_url,
            'bucket': bucketname
        }
        self.response.out.write(template.render(context))
Example No. 22
    def _create_google_cloud_storage(self, config):
        """
        Create GoogleCloudStorage instance
        :param config:  The config
        :type  config:  dict
        :return:        GoogleCloudStorage instance
        :rtype:         GoogleCloudStorage
        """

        from google.appengine.api import app_identity
        bucket = app_identity.get_default_gcs_bucket_name()
        if 'bucket' in config:
            bucket = config['bucket']

        storage_path = os.path.join(os.sep, self._storage_path)
        if 'directory' in config:
            directory = config['directory']
            # Check if absolute or relative path
            if not directory.startswith(os.sep):
                storage_path = os.path.join(storage_path, directory)
            else:
                storage_path = directory

        files_path = self._files_path
        if 'files_path' in config:
            files_path = config['files_path']

        options = {}

        if 'prefix' in config:
            options['prefix'] = config['prefix']

        from edmunds.storage.drivers.googlecloudstorage import GoogleCloudStorage
        return GoogleCloudStorage(self._app, bucket, storage_path, files_path,
                                  **options)
Example No. 23
    def list_gcs_file_names(cls, bucket=None, folder='/'):
        """ Example usage :  for gcs_filename, filename in BlobFiles.list_gcs_file_names(folder='/upload') """

        for obj in gcs.listbucket('/%s%s' % (bucket or app_identity.get_default_gcs_bucket_name(), folder)):
            pbf = cls._query(cls.gcs_filename == obj.filename).get(projection=cls.filename)
            # yield result: the gcs_filename from GCS and the corresponding filename from BlobFiles
            yield obj.filename, (pbf.filename if pbf else '')
Example No. 24
 def __init__(self, name, content_type=None):
     self.name = name
     self.content_type = "text/plain"
     self.file_id = str(uuid4())
     self.file = None
     self.__modes = {"w": {content_type: self.content_type}, "r": {}}
     self.name = "/" + app_identity.get_default_gcs_bucket_name() + "/" + self.name
Example No. 25
    def run(self, mapper_key, reducer_key, file_name, language):
        """ run """
        logging.debug("filename is %s" % file_name)

        bucket_name = app_identity.get_default_gcs_bucket_name()
        mapper_params = {
            "entity_kind": "src.model.Data",
            "mapper": mapper_key,
            "reducer": reducer_key
        }

        output = yield mapreduce_pipeline.MapreducePipeline(
            file_name,
            mapper_spec="src.mapreduce.interpreter." + language +
            "_mapper_interpreter",
            reducer_spec="src.mapreduce.interpreter." + language +
            "_reducer_interpreter",
            input_reader_spec="mapreduce.input_readers.DatastoreInputReader",
            output_writer_spec=
            "mapreduce.output_writers.GoogleCloudStorageOutputWriter",
            mapper_params=mapper_params,
            reducer_params={
                "output_writer": {
                    "reducer": reducer_key,
                    "bucket_name": bucket_name,
                    "content_type": "text/plain",
                }
            },
            shards=64)

        # @TODO test and improve store output
        yield StoreOutput(output)
Example No. 26
    def get(self):
        logging.info("SummaryTask starting...")

        # init class and variables
        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        bucket = '/' + bucket_name
        trendManager = TrendManager()
        dataModelConverter = DataModelConverter()
        csvUtils = CsvUtils()
        cloudStorageUtils = CloudStorageUtils()

        previous_day_timestamp = int(time.time()) - Globals._1_DAY
        q_futures = []
        for region in self.getRegions():
            try:
                date = TimezoneAwareDate(region, self.request.get('date'))
                trendsJson = self.getTrends(region, trendManager)
                self.saveToCloudStorage(dataModelConverter, csvUtils,
                                        cloudStorageUtils, trendsJson, region,
                                        bucket, date)
                self.saveToDatastore(q_futures, trendsJson, region, date)
                self.deleteFromDatastore(
                    q_futures, region, previous_day_timestamp)

            except Exception, e:
                traceback.print_exc()
                Error(msg=str(e), timestamp=int(time.time())).put()
                SendEmail().send('Error on SummaryTask', str(e))
                self.retry()
Example No. 27
	def post(self):
		# 'BUCKET_NAME' env var wins, falling back to the app default bucket
		# (e.g. local-amenities.appspot.com).
		bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
		self.response.headers['Content-Type'] = 'text/plain'
		self.response.write('Demo GCS Application running from Version: '
                        + os.environ['CURRENT_VERSION_ID'] + '\n')
		self.response.write('Using bucket name: ' + bucket_name + '\n\n')
		bucket = '/' + bucket_name
		filename  = bucket + '/' + cgi.escape(self.request.get('filename'))
		modelNumber = cgi.escape(self.request.get('model'))
		
		#Based on a user input transform into data model entities.
		if modelNumber == '1':
			self.AccessPostcodes(filename)
		elif modelNumber == '2':
			self.AccessOutcodes(filename)
		elif modelNumber == '3':
			self.AccessGP(filename)
		elif modelNumber == '4':
			self.AccessSupermarket(filename)
		elif modelNumber == '5':
			self.AccessTrainStation(filename)
		elif modelNumber == '6':
			self.AccessSchool(filename)
		else:
			self.response.write("Model number is not provided")
Example No. 28
def hello():
    from google.appengine.api import app_identity
    import cloudstorage as gcs

    # Get the default bucket name
    default_bucket_name = app_identity.get_default_gcs_bucket_name()

    out = "Default bucket : " + default_bucket_name

    ## Write to cloud
    filename = '/' + default_bucket_name + '/new.txt'
    gcs_file = gcs.open(filename, 'w', content_type='text/plain')
    gcs_file.write('abcde\n')
    gcs_file.write('Hello!' + '\n')
    gcs_file.close()

    ## Read from cloud
    gcs_file = gcs.open(filename)
    contents = gcs_file.read()
    gcs_file.close()

    out += "<p>Contents :</p>"
    out += "<p>" + contents + "</p>"

    # Files CANNOT be appended to, as the objects are immutable. To append, you have to read, modify the contents, and overwrite.

    #List items
    blist = gcs.listbucket('/' + default_bucket_name)
    print(list(blist))

    return out
Example No. 29
def StorageHandler(request, ident):
    if not ident == 'read':
        response = HttpResponse("", content_type='application/json')
    try:
        if request.method == 'GET':
            if (ident == 'list'):
                ans = list_bucket('/' + get_application_id() + '.appspot.com')
                response.write(
                    simplejson.dumps({
                        'error': 0,
                        'all_objects': ans
                    }))
            elif (ident == 'basic'):
                general(response)
            elif (ident == 'read'):
                nombre = request.GET.get('name', None)
                response = read_file(nombre)
            elif (ident == 'guid'):
                response.write(
                    simplejson.dumps({
                        'error': 0,
                        'uid': generarUID()
                    }))
            else:
                response.write(simplejson.dumps({'error': 0}))
        elif request.method == 'POST':
            archivo = request.FILES['file-0']
            uploaded_file_content = archivo.read()
            uploaded_file_filename = archivo.name
            uploaded_file_type = archivo.content_type
            nombreAnterior = request.POST.get('name', None)
            carpeta = request.POST.get('folder', '')
            if (not nombreAnterior is None):
                try:
                    gcs.delete(nombreAnterior)
                except:
                    pass
            nombre = '/' + app_identity.get_default_gcs_bucket_name(
            ) + carpeta + '/' + generarUID() + '-' + uploaded_file_filename
            write_retry_params = gcs.RetryParams(backoff_factor=1.1)
            gcs_file = gcs.open(nombre,
                                'w',
                                content_type=uploaded_file_type,
                                options={'x-goog-acl': 'public-read'},
                                retry_params=write_retry_params)
            gcs_file.write(uploaded_file_content)
            gcs_file.close()
            response.write(simplejson.dumps({'error': 0, 'id': nombre}))

    except Exception, e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        response = HttpResponse("", content_type='application/json')
        response.write(
            simplejson.dumps({
                'error':
                1,
                'msg':
                'Error de servidor: ' +
                repr(traceback.format_tb(exc_traceback)) + '->' + str(e)
            }))
Example No. 30
 def get_github_url(self):
     bucket_name = os.environ.get(
         'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
     with cloudstorage.open("/" + bucket_name +
                            "/token/github.aip.token") as sfile:
         return "https://api.github.com/repos/shootsoft/PlutoVideoSnapshoter/releases/latest?access_token=" + sfile.readline(
         )
Example No. 31
  def get(self):
 
    #login
    usr = users.get_current_user()
    if not usr:
      url = users.create_login_url(self.request.uri)
      url_linktext = 'Login'
      self.redirect(users.create_login_url(self.request.uri))
    else:
      url = users.create_logout_url(self.request.uri)
      url_linktext = 'Logout'
      
      #testing users deb model
      userlist = User.query().fetch(5)
      
      #get files from bucket for the user
      bucket_name = "/"+os.environ.get('BUCKET_NAME',app_identity.get_default_gcs_bucket_name())+"/"+str(usr)
      l_files=gcs.listbucket(bucket_name)

      #get shared files of the user
      sh_files = SharedFile.query(SharedFile.recipients == usr.email())
      result = sh_files.fetch(1000)

      template_values = {
        'url': url,
        'url_linktext': url_linktext,
        'user_name': usr,
        'files': l_files,
        'users': userlist,
        'shared_files': sh_files,
      }

      template = JINJA_ENVIRONMENT.get_template('index.html')
      self.response.write(template.render(template_values))
Example No. 32
def gcsFunction1(fileName=None,data=None):
#using an ordinary post request with file as the data stream
    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                          max_delay=5.0,
                                          backoff_factor=2,
                                          max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    bucket_name = os.environ.get('BUCKET_NAME',
                       app_identity.get_default_gcs_bucket_name())
    bucket = '/' + bucket_name
    filename = bucket + '/'+fileName
    # Guess the MIME type from the file name; guess_type returns a
    # (type, encoding) tuple, so keep only the type.
    content_t = mimetypes.guess_type(fileName)[0]
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    try:
        gcs_file = gcs.open(filename,
                    'w',
                    content_type=content_t,
                    options={'x-goog-meta-filename': fileName},
                    retry_params=write_retry_params)
        #get only the data stream
        data=data.split(',')[1]
        #convert data to proper binary format for saving
        data=base64.b64decode(data)
        gcs_file.write(data)
        gcs_file.close()
    except Exception as e:
        logging.exception(e)
        raise Exception(500, "Server Error: " + str(e))
    return(True)
Example No. 33
def upload(request):
    qs = models.Elements.objects.all()
    filename = djqscsv.generate_filename(qs, append_datestamp=True)

    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                          max_delay=5.0,
                                          backoff_factor=2,
                                          max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
    bucket = '/' + bucket_name
    file_obj = djqscsv.render_to_csv_response(qs, filename)

    try:
        write_retry_params = gcs.RetryParams(backoff_factor=1.1)
        gcs_file = gcs.open(bucket+'/'+filename,
                            'w', 
                            content_type='text/csv',
                            options={'x-goog-meta-foo': 'foo',
                                     'x-goog-meta-bar': 'bar'},
                            retry_params=write_retry_params)
        gcs_file.write(file_obj.content)
        gcs_file.close()

    except Exception, e:  # pylint: disable=broad-except
        logging.exception(e)
Example No. 34
  def post(self):
    trace_uuid = str(uuid.uuid4())
    if 'GCS_BUCKET_NAME' not in os.environ:
      bucket_name = app_identity.get_default_gcs_bucket_name()
    else:
      bucket_name = os.environ['GCS_BUCKET_NAME']
    gcs_path = ('/' + bucket_name + '/' + trace_uuid + '.gz')
    gcs_file = gcs.open(gcs_path,
                        'w',
                        content_type='application/octet-stream',
                        options={},
                        retry_params=default_retry_params)
    gcs_file.write(self.request.get('trace'))
    gcs_file.close()

    trace_object = trace_info.TraceInfo(id=trace_uuid)
    trace_object.prod = self.request.get('prod')
    trace_object.network_type = self.request.get('network-type')
    trace_object.remote_addr = os.environ["REMOTE_ADDR"]
    tags_string = self.request.get('tags')
    if tags_string:
      # Tags are comma separated and should only include alphanumeric + '-'.
      if re.match('^[a-zA-Z0-9-,]+$', tags_string):
        trace_object.tags = tags_string.split(',')
      else:
        logging.warning('The provided tags string includes one or more invalid'
                        ' characters and will be ignored')
    trace_object.user_agent = self.request.headers.get('User-Agent')
    trace_object.ver = self.request.get('product-version')
    trace_object.config = self.request.get('config')
    trace_object.put()

    self.response.write(trace_uuid)
Example No. 35
    def post(self, shortname):
        shortname = shortname.lower()
        tourney = Tournament.get_by_id(shortname)
        if not tourney:
            self.response.set_status(404)
            self.response.write('Tournament not found.')
            return

        codeword = self.request.get('codeword')
        if codeword != tourney.codeword:
            self.response.set_status(403)
            self.response.write("You didn't say the magic word.")
            return

        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        filename = '/%s/%s/export.js' % (bucket_name, shortname)

        try:
            gcs.delete(filename)
        except gcs.NotFoundError:
            pass

        tourney.key.delete()

        self.redirect('/')
Example No. 36
    def get(self):
        """To be called by cron and only by cron."""
        # if 'X-AppEngine-Cron' not in self.request.headers:
        #     raise HTTP403_Forbidden('Scheduled backups must be started via cron')

        if not gaetkconfig.BACKUP_BUCKET:
            bucket = get_default_gcs_bucket_name()
        else:
            bucket = gaetkconfig.BACKUP_BUCKET

        today = datetime.date.today()
        kinds = [kind for kind in _get_all_datastore_kinds()]
        # if kind not in config.BACKUP_BLACKLIST]
        bucketname = '/'.join(
            [bucket, get_application_id(),
             today.strftime('%Y-%m-%d')])
        bucketname = bucketname.lstrip('/')
        params = {
            'name': 'ds',
            'gs_bucket_name': bucketname,
            'filesystem': 'gs',
            'queue': gaetkconfig.BACKUP_QUEUE,
            'kind': kinds,
        }
        logger.info('backup to %r %r', bucketname, params)

        taskqueue.add(
            url='/_ah/datastore_admin/backup.create',
            method='POST',
            target='ah-builtin-python-bundle',
            params=params,
        )
        self.return_text('OK')
Example No. 37
    def post(self):
        user = users.get_current_user()
        if user is None:
            self.error(401)
            return

        bucket_name = app_identity.get_default_gcs_bucket_name()
        uploaded_file = self.request.POST.get('uploaded_file')
        file_name = getattr(uploaded_file, 'filename', None)
        file_content = getattr(uploaded_file, 'file', None)
        real_path = ''
        if file_name and file_content:
            content_t = mimetypes.guess_type(file_name)[0]
            real_path = os.path.join('/', bucket_name, user.user_id(), file_name)

            with cloudstorage.open(real_path, 'w', content_type=content_t,
                                   options={'x-goog-acl': 'public-read'}) as f:
                f.write(file_content.read())
        self._create_note(user, file_name, real_path)

        logout_url = users.create_logout_url(self.request.uri)
        template_context = {
            'user': user.nickname(),
            'logout_url': logout_url,
        }
        self.response.out.write(
            self._render_template('main.html', template_context))
Example No. 38
    def _create_note(self, user, title, content, attachments):

        note = Note(parent=ndb.Key("User", user.nickname()),
                    title=title,
                    content=content)
        note.put()

        if attachments:
            bucket_name = app_identity.get_default_gcs_bucket_name()
            for file_name, file_content in attachments:
                content_t = mimetypes.guess_type(file_name)[0]
                real_path = os.path.join('/', bucket_name, user.user_id(), file_name)

                with cloudstorage.open(real_path, 'w', content_type=content_t,
                                       options={'x-goog-acl': 'public-read'}) as f:
                    f.write(file_content.decode())

                key = blobstore.create_gs_key('/gs' + real_path)
                try:
                    url = images.get_serving_url(key, size=0)
                    thumbnail_url = images.get_serving_url(key, size=150, crop=True)
                except (images.TransformationError, images.NotImageError):
                    url = "http://storage.googleapis.com{}".format(real_path)
                    thumbnail_url = None

                f = NoteFile(parent=note.key, name=file_name,
                             url=url, thumbnail_url=thumbnail_url,
                             full_path=real_path)
                f.put()
                note.files.append(f.key)

            note.put()
Example No. 39
    def get(self):
        bucket_name = os.environ.get('BUCKET_NAME',
                                     app_identity.get_default_gcs_bucket_name())
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write('Demo GCS Application running from Version: '
                            + os.environ['CURRENT_VERSION_ID'] + '\n')
        self.response.write('Using bucket name: ' + bucket_name + '\n\n')

        bucket = '/' + bucket_name

        filename = bucket + '/demo-testfile'
        self.tmp_filenames_to_clean_up = []

        try:
            self.create_file(filename)
            self.response.write('\n\n')

            self.read_file(filename)
            self.response.write('\n\n')

            self.stat_file(filename)
            self.response.write('\n\n')

            self.create_files_for_list_bucket(bucket)
            self.response.write('\n\n')

            self.list_bucket_directory_mode(bucket)
            self.response.write('\n\n')

            self.list_bucket(bucket)
            self.response.write('\n\n')
        except Exception, e:
            logging.error(e)
            self.delete_files()
            self.response.write('\n\nThere was an error running the demo!')
Example No. 40
def get_url(path, ttl=15):
    """Returns a signed URL for accessing a resource in the provided path.
    
    Args:
        path - path to the resource
        ttl - signed URL expiry time in minutes
        
    Returns:
        Signed URL to the resource
    """
    expiry = int(round(time.time() + ttl * 60))
    bucket = app_identity.get_default_gcs_bucket_name()
    cpath = '/' + bucket + '/' + path

    data = []
    data.append('GET')          # Method
    data.append('')             # MD5 digest value
    data.append('')             # Content-type
    data.append(str(expiry))    # Expiry date
    data.append(cpath)          # Path to the resource
    data_str = "\n".join(data)

    signing_key_name, signature = app_identity.sign_blob(str(data_str))
    
    url = 'https://storage.googleapis.com'
    url += cpath
    url += '?GoogleAccessId=' + app_identity.get_service_account_name()
    url += '&Expires=' + str(expiry)
    url += '&Signature=' + urllib.quote_plus(base64.b64encode(signature))
    
    return url
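
A quick caller sketch (the object path is invented): the returned URL can be handed straight to a client and stays valid for `ttl` minutes:

    # Hypothetical usage: sign a 30-minute link for an object in the
    # default bucket.
    url = get_url('reports/summary.csv', ttl=30)
    logging.info('signed url: %s', url)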
Example No. 41
    def post(self):
        user = users.get_current_user()
        if user is None:
            self.error(401)
            return

        bucket_name = app_identity.get_default_gcs_bucket_name()
        uploaded_file = self.request.POST.get('uploaded_file')
        file_name = getattr(uploaded_file, 'filename', None)
        file_content = getattr(uploaded_file, 'file', None)
        real_path = ''
        if file_name and file_content:
            content_t = mimetypes.guess_type(file_name)[0]
            real_path = os.path.join('/', bucket_name, user.user_id(),
                                     file_name)

            with cloudstorage.open(real_path,
                                   'w',
                                   content_type=content_t,
                                   options={'x-goog-acl': 'public-read'}) as f:
                f.write(file_content.read())
        self._create_note(user, file_name, real_path)

        logout_url = users.create_logout_url(self.request.uri)
        template_context = {
            'user': user.nickname(),
            'logout_url': logout_url,
        }
        self.response.out.write(
            self._render_template('main.html', template_context))
Example No. 42
 def _gs_path(self, url):
     if self._gs_bucket is None:
         self._gs_bucket = app_identity.get_default_gcs_bucket_name()
         # gs_bucket = os.environ.get('BUCKET_NAME', gs_bucket)
     gs_bucket = self._gs_bucket
     gs_path = "/" + gs_bucket + url
     return gs_path
Example No. 43
    def get(self):
        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())

        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(
            'Demo GCS Application running from Version: {}\n'.format(
                os.environ['CURRENT_VERSION_ID']))
        self.response.write('Using bucket name: {}\n\n'.format(bucket_name))
        # [END get_default_bucket]

        bucket = '/' + bucket_name
        filename = bucket + '/demo-testfile'
        self.tmp_filenames_to_clean_up = []

        self.create_file(filename)
        self.response.write('\n\n')

        self.read_file(filename)
        self.response.write('\n\n')

        self.stat_file(filename)
        self.response.write('\n\n')

        self.create_files_for_list_bucket(bucket)
        self.response.write('\n\n')

        self.list_bucket(bucket)
        self.response.write('\n\n')

        self.list_bucket_directory_mode(bucket)
        self.response.write('\n\n')

        self.delete_files()
        self.response.write('\n\nThe demo ran successfully!\n')
Example No. 44
    def post(self):
        user = users.get_current_user()
        if user:
            # app_default_bucket
            bucket_name = os.environ.get(
                'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
            image = self.request.get('image')
            public = self.request.POST.get('public-checkbox', None)

            check_box = False
            if public is not None:
                check_box = True

            file_name = self.request.params['image'].filename
            # Make file name unique
            name, extension = file_name.rsplit('.', 1)
            file_name = '{}-{}.{}'.format(name, uuid.uuid4(), extension)

            self._upload_to_gcs('/' + bucket_name + '/' + file_name,
                                image,
                                check_box=check_box)
            self._store_image_data(file_name, check_box, user.email())
            self.response.headers['Content-Type'] = 'text/plain'
            self.response.write('Complete!')
        else:
            self.show_login()
Example No. 45
    def _create_note(self, user, title, content, attachments):

        note = Note(parent=ndb.Key("User", user.nickname()),
                    title=title,
                    content=content)
        note.put()

        if attachments:
            bucket_name = app_identity.get_default_gcs_bucket_name()
            for file_name, file_content in attachments:
                content_t = mimetypes.guess_type(file_name)[0]
                real_path = os.path.join('/', bucket_name, user.user_id(), file_name)

                with cloudstorage.open(real_path, 'w', content_type=content_t,
                                       options={'x-goog-acl': 'public-read'}) as f:
                    f.write(file_content.decode())

                key = blobstore.create_gs_key('/gs' + real_path)
                try:
                    url = images.get_serving_url(key, size=0)
                    thumbnail_url = images.get_serving_url(key, size=150, crop=True)
                except (images.TransformationError, images.NotImageError):
                    url = "http://storage.googleapis.com{}".format(real_path)
                    thumbnail_url = None

                f = NoteFile(parent=note.key, name=file_name,
                             url=url, thumbnail_url=thumbnail_url,
                             full_path=real_path)
                f.put()
                note.files.append(f.key)

            note.put()
Example No. 46
def get_price_info(league):
    bucket_name = app_identity.get_default_gcs_bucket_name()
    filename = '/%s/prices/%s.json' % (bucket_name, league)
    file = gcs.open(filename)
    data = json.load(file)
    file.close()
    return prices.getPriceGroups(data)
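
A caller sketch (the league name is invented); get_price_info expects a JSON document at /<default-bucket>/prices/<league>.json:

    # Hypothetical usage: load cached price data for one league.
    groups = get_price_info('standard')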
Example No. 47
    def CreateFile(self, nombre, datos):
        my_default_retry_params = gcs.RetryParams(initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15)
        gcs.set_default_retry_params(my_default_retry_params)

        bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())

        #bucket_name = os.environ.get('BUCKET_NAME', 'prueba')
        #print bucket_name
        #bucket_name = 'prueba'
        bucket = '/' + bucket_name
        filename = bucket + '/' + nombre

        print 'filename: '+filename

        #https://cloud.google.com/appengine/docs/python/googlecloudstorageclient/functions

        write_retry_params = gcs.RetryParams(backoff_factor=1.1)

        gcs_file = gcs.open(filename, 'w', content_type='image/jpeg', options={'x-goog-meta-foo': 'foo', 'x-goog-meta-bar': 'bar', 'x-goog-acl': 'public-read'}, retry_params=write_retry_params)
        gcs_file.write(datos)
        gcs_file.close()

        blobstore_filename = '/gs' + filename

        key = blobstore.create_gs_key(blobstore_filename)


        # If running on a production App Engine server:
        if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'):
            return 'http://storage.googleapis.com'+filename
        # If in the local development environment:
        else:
            return get_serving_url(key)
Example No. 48
    def get(self):
        lines = []
        jps = models.JourneyPattern.query().order(models.JourneyPattern.line)
        for jp in jps:
            if jp.line not in lines:
                logging.info(jp.line)
                lines.append(jp.line)
            logging.info([s.get().name for s in jp.stops])      
          
        return 

#         return taskqueue.add(url='/timetable', queue_name='default', params={'file_name': 'tfl_1-BAK_-390106-y05.xml'})

        if self.is_local():
            taskqueue.add(url='/timetable', queue_name='default')
        else:
            bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
#             bucket_name = 'jeg376-tm470.appspot.com'
            filename = '/%s/stream.zip' % bucket_name
            gcs_file = cloudstorage.open(filename)
    
            tfl_zip = ZipFile(gcs_file, 'r')
            tfl_data = tfl_zip.read('LULDLRRiverTramCable.zip')
            tfl_data = ZipFile(StringIO.StringIO(tfl_data), 'r')
            
            tube_file_matcher = re.compile('^tfl_\d-\w{3}_.*\.xml$')
            for file_name in tfl_data.namelist():
                if tube_file_matcher.match(file_name):
                    taskqueue.add(url='/timetable', queue_name='default', params={'file_name': file_name})
Example No. 49
    def post(self):
        traveler_type = self.request.get('type')
        start_at = self.request.get('start_at')
        file_name = self.request.get('file_name')
        
        logging.info('start At: %s' % start_at)
        
        if start_at:
            start_at = int(start_at)
        else:
            #delete all before starting
            f = []
            for c in models.Crowdedness.query(models.Crowdedness.traveler_type == traveler_type).iter(keys_only=True):
                f.append(c.delete_async())
            ndb.Future.wait_all(f)
        
        logging.info('Starting import...')

        if self.is_local():
            gcs_file = open('./%s' % file_name)
        else:
            bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
#             bucket_name = 'jeg376-tm470.appspot.com'
            filename = '/%s/%s' % (bucket_name, file_name)
            gcs_file = cloudstorage.open(filename)
        
        self.parse_file(gcs_file, traveler_type, file_name, start_at)
Example No. 50
 def _error_csv_filename(self):
     meta = self.get_meta()
     return '/%s/%s/%s.csv' % (
         get_default_gcs_bucket_name(),
         meta.error_csv_subdirectory,
         self.pk
     )
Example No. 51
    def get(self):
        # Get the default Cloud Storage Bucket name and create a file name for
        # the object in Cloud Storage.
        bucket = app_identity.get_default_gcs_bucket_name()

        # Cloud Storage file names are in the format /bucket/object.
        filename = '/{}/blobstore_demo'.format(bucket)

        # Create a file in Google Cloud Storage and write something to it.
        with cloudstorage.open(filename, 'w') as filehandle:
            filehandle.write('abcde\n')

        # In order to read the contents of the file using the Blobstore API,
        # you must create a blob_key from the Cloud Storage file name.
        # Blobstore expects the filename to be in the format of:
        # /gs/bucket/object
        blobstore_filename = '/gs{}'.format(filename)
        blob_key = blobstore.create_gs_key(blobstore_filename)

        # Read the file's contents using the Blobstore API.
        # The last two parameters specify the start and end index of bytes we
        # want to read.
        data = blobstore.fetch_data(blob_key, 0, 6)

        # Write the contents to the response.
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(data)

        # Delete the file from Google Cloud Storage using the blob_key.
        blobstore.delete(blob_key)
Example No. 52
    def get(self):
        logging.info("SummaryTask starting...")

        # init class and variables
        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        bucket = '/' + bucket_name
        trendManager = TrendManager()
        dataModelConverter = DataModelConverter()
        csvUtils = CsvUtils()
        cloudStorageUtils = CloudStorageUtils()

        previous_day_timestamp = int(time.time()) - Globals._1_DAY
        q_futures = []
        for region in self.getRegions():
            try:
                date = TimezoneAwareDate(region, self.request.get('date'))
                trendsJson = self.getTrends(region, trendManager)
                self.saveToCloudStorage(dataModelConverter, csvUtils,
                                        cloudStorageUtils, trendsJson, region,
                                        bucket, date)
                self.saveToDatastore(q_futures, trendsJson, region, date)
                self.deleteFromDatastore(q_futures, region,
                                         previous_day_timestamp)

            except Exception, e:
                traceback.print_exc()
                Error(msg=str(e), timestamp=int(time.time())).put()
                SendEmail().send('Error on SummaryTask', str(e))
                self.retry()
Example No. 53
def hello():
    """Main function responsible for home page calculating the time to insert """
    #variable to determine the where app is running
    env = os.getenv('SERVER_SOFTWARE')
    main_string = "Time to Insert data ::"
    try:
        #getting the default bucket name from the application environment
        bucket_name = os.environ.get('BUCKET_NAME',app_identity.get_default_gcs_bucket_name())
        #file_name = "/"+bucket_name+"/all_month.csv"
        #creating the file name including the path of the bucket
        file_name = "/"+bucket_name+"/demo-testfile.csv"
        #getting db connection according to the environment by calling get_connection object  
        db = get_connection(env)
        # creating cursor object to execute the queries
        cursor = db.cursor()
        #initialising the databases and the tables if not created with the ionitialise_dbfunction which returns current cursor 
        cursor = initialise_db(cursor)
        #starting the clock for calculate insert time
        s_time = time.time()
        # inserting data into the using the bucket
        insert_into_table(cursor,file_name)
        # commiting the changes 
        db.commit()
        # stopping the clock
        e_time = time.time()
        # calculating the time required to insert the data
        t_time = str(e_time-s_time)
        #getting all earthquakes greater than magnitude 5,4,3,2 
        mag_5 = len(get_eq_gr_mag(cursor ,5))
        mag_4 = len(get_eq_gr_mag(cursor ,4))
        mag_3 = len(get_eq_gr_mag(cursor, 3))
        mag_2 = len(get_eq_gr_mag(cursor, 2))
        # getting all the earthquakes of magnitude equal t0 5,4,3,2
        m_5 = len(get_eq_equal_mag(cursor,5))
        m_4 = len(get_eq_equal_mag(cursor,4))
        m_3 = len(get_eq_equal_mag(cursor,3))
        m_2 = len(get_eq_equal_mag(cursor,2))
        #getting the weekly earthequake count for magnitude 5,4,3,2
        rows_5 = filter_result(get_eq_equal_mag(cursor ,5))
        rows_4 = filter_result(get_eq_equal_mag(cursor ,4))
        rows_3 = filter_result(get_eq_equal_mag(cursor ,3))
        rows_2 = filter_result(get_eq_equal_mag(cursor ,2))
        result_string = rows_5+"<br>"+rows_4+"<br>"+rows_3+"<br>"+rows_2  
        # creating a formatted output string for displaying resilus as http response
        output = ""
        output += "<h3>"+main_string+str(t_time)+"</h3>"
        output += "<h3>Number of earthquakes greater than maginitude 5: "+str(mag_5)+"</h3>"
        output += "<h3>Number of earthquakes greater than maginitude 4: "+str(mag_4)+"</h3>"
        output += "<h3>Number of earthquakes greater than maginitude 3: "+str(mag_3)+"</h3>"
        output += "<h3>Number of earthquakes greater than maginitude 2: "+str(mag_2)+"</h3>"
        output += "<h3>Number of earthquakes equal to maginitude 5: "+str(m_5)+"</h3>"
        output += "<h3>Number of earthquakes equal to maginitude 4: "+str(m_4)+"</h3>"
        output += "<h3>Number of earthquakes equal to maginitude 3: "+str(m_3)+"</h3>"
        output += "<h3>Number of earthquakes equal to maginitude 2: "+str(m_2)+"</h3>"
        # returning the output by appending the results to it 
        return output+"<h3>Number of Earthquakes per each week of maginitude 5,4,3,2<br>"+result_string+"</h3>"
    except Exception as e:
        # printing the exception message to the logs if exists for debugging purpose
        return str(e)
Example No. 54
 def _error_csv_filename(self):
     meta = self.task.get_meta()
     return "/%s/%s/%s-shard-%s.csv" % (
         get_default_gcs_bucket_name(),
         meta.error_csv_subdirectory,
         self.task.pk,
         self.pk
     )
Example No. 55
def read_blob(key):
  """Reads a Unicode string back from a given blob."""
  bucket = app_identity.get_default_gcs_bucket_name()
  path = '/%s/%d' % (bucket, key)
  with cloudstorage.open(path, 'r') as f:
    blob = f.read()
  data = blob.decode('utf-8', 'replace')
  return data
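
A round-trip sketch under the same /<bucket>/<key> layout (the key 42 is invented): write UTF-8 bytes first, then read them back with read_blob:

    # Hypothetical companion writer for read_blob().
    bucket = app_identity.get_default_gcs_bucket_name()
    with cloudstorage.open('/%s/%d' % (bucket, 42), 'w') as f:
        f.write(u'h\xe9llo'.encode('utf-8'))
    assert read_blob(42) == u'h\xe9llo'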
Example No. 56
 def get(self, filename):
     self.response.out.write(
         '<html><head><title>eggs</title></head><body><pre>')
     self.response.out.write(
         '<a href="/gs/{bucket}/{filename}">{filename}</a>\n'.format(
             bucket=app_identity.get_default_gcs_bucket_name(),
             filename=filename))
     self.response.out.write('</pre></body></html>')