Example #1
def gcsFunction1(fileName=None, data=None):
    # using an ordinary POST request with the file as the data stream
    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                          max_delay=5.0,
                                          backoff_factor=2,
                                          max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    bucket_name = os.environ.get('BUCKET_NAME',
                       app_identity.get_default_gcs_bucket_name())
    bucket = '/' + bucket_name
    filename = bucket + '/'+fileName
    content_t = mimetypes.guess_type(data)[0]  # guess_type() returns (type, encoding); keep only the MIME type
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    try:
        gcs_file = gcs.open(filename,
                    'w',
                    content_type=content_t,
                    options={'x-goog-meta-filename': fileName},
                    retry_params=write_retry_params)
        #get only the data stream
        data=data.split(',')[1]
        #convert data to proper binary format for saving
        data=base64.b64decode(data)
        gcs_file.write(data)
        gcs_file.close()
    except Exception as e:
        logging.exception(e)
        raise Exception(500, "Server Error: " + str(e))
    return True
Example #2
    def write(self, data):
        if self.mode == 'a':
            # GCS objects are immutable, so append mode is emulated by reading
            # the existing object (if any) and rewriting it with the new data.
            try:
                file = gcs.open(self.way)
                dataOld = file.read()
                file.close()
            except gcs.NotFoundError:
                # nothing written yet
                dataOld = ''
            write_retry_params = gcs.RetryParams(backoff_factor=1.1)
            file = gcs.open(self.way,
                            'w',
                            content_type='text/plain',
                            retry_params=write_retry_params)
            file.write(dataOld + data.encode('utf-8'))
            file.close()
            return "ok"
        else:
            write_retry_params = gcs.RetryParams(backoff_factor=1.1)
            file = gcs.open(self.way,
                            self.mode,
                            content_type='text/plain',
                            retry_params=write_retry_params)
            file.write(data.encode('utf-8'))
            file.close()
            return "ok"
Example #3
def gcsWrite(cont,iden,cType="file",fileName=None):
    #set storage parameters and options
    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                          max_delay=5.0,
                                          backoff_factor=2,
                                          max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    #get the right storage folder 
    bucket_name = os.environ.get('BUCKET_NAME',
                       app_identity.get_default_gcs_bucket_name())
    bucket = '/' + bucket_name
    #get the time for including in filename for uniqueness
    now=datetime.datetime.now()
    #check content type: file or data and set the right parameters    
    if cType=="file":
        content_t=cont.mimetype
        #secure filename
        fileName=cont.filename
    else:
        #try to guess the data type based on data stream content
        content_t = mimetypes.guess_type(cont)[0]  # keep only the MIME type from (type, encoding)
        #get only the data stream
        cont=cont.split(',')[1]
        #decode the data into proper format for image file
        cont=base64.b64decode(cont)
    #build the right filenames for storage to file system
    fileName=secure_filename(fileName)
    fileName1=iden+now.isoformat()+fileName
    filename = bucket + '/'+fileName1
    #re-set storage write parameters     
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    #try to write data to storage
    try:
        #open gcs file
        gcs_file = gcs.open(filename,
                    'w',
                    content_type=content_t,
                    options={'x-goog-meta-filename': fileName},
                    retry_params=write_retry_params)
        #check content type: file or data
        if cType=="file":
            #get file contents as data stream
            gcs_file.write(cont.stream.read())
        else:
            #data is already in the right format
            gcs_file.write(cont)
        #close gcs file   
        gcs_file.close()
    except Exception as e:
        logging.exception(e)
        raise Exception(500, str(e))
    return filename, fileName
Example #4
    def write_to_bucket(self, bucket_name=None, folder_name=None, file_to_write=None,
                        content=None, storage_meta=None, content_type='application/json'):
        """


        :type content_type: object
        :param bucket_name:
        :param file_to_write:
        :return:
        """
        bucket_name = str(bucket_name)
        bucket = "/" + bucket_name
        if folder_name:
            bucket = bucket + "/" + folder_name
        filename = bucket + "/" + file_to_write

        write_retry_params = gcs.RetryParams(backoff_factor=1.1)
        try:
            gcs_file = gcs.open(filename,
                                'w',
                                content_type=content_type,
                                options=storage_meta,
                                retry_params=write_retry_params)
            # write the payload and finalize the GCS object
            gcs_file.write(content)
            gcs_file.close()
        except Exception, e:
            print e
            raise
Example #5
    def post(self, category, name):
        """Saves a resource in the cloud storage

        Multiple files are possible, if multiple files are uploaded the 'name' needs to be 'multiple'.
        For multiple files the file name is take as name.
        If multiple fils are uploaded without 'multiple' only the last file is saved.
        The function can also gerenate a serving link, this is either public or private (not guessable).

        If category or name (without extension) is a user key it needs to belong to the loged in user.
        """
        link = request.form.get(
            'link', default='private')  # either public, private or False
        gcs_links = []
        api_links = []
        private_links = []
        links = []
        print "Category: " + str(category)
        try:
            category_key = ndb.Key(urlsafe=category)
        except:
            category_key = False

        for k, f in request.files.iteritems(multi=False):
            if name == 'multiple':
                name = f.filename
            try:
                name_key = ndb.Key(
                    urlsafe=os.path.splitext(os.path.basename(name))[0])
            except:
                name_key = False

            if category_key or name_key:
                user_key = category_key or name_key
                if not auth.is_authorized(user_key):
                    return abort(403)

            write_retry_params = gcs.RetryParams(backoff_factor=1.1)
            adr = "{}/{}/{}".format(GCS_BUCKET, category, name)
            gcs_file = gcs.open(adr,
                                'w',
                                content_type=f.mimetype,
                                options={'x-goog-meta-name': f.filename},
                                retry_params=write_retry_params)
            f.save(gcs_file)  # saves file to cloud storage
            gcs_file.close()
            f.close()
            gcs_links.append("/_ah/gcs" + adr)
            api_links.append("/api/v1/upload/" + category + '/' + name)
            links.append("/resource/" + '/' + category + '/' + name)
            if link == 'private':  #TODO implement public links
                blob_key = blobstore.create_gs_key('/gs' + adr)
                img_url = images.get_serving_url(blob_key=blob_key)
                private_links.append(img_url)

        return {
            'links': links,
            'private_links': private_links,
            'gcs_links': gcs_links,
            'api_links': api_links
        }
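A minimal client-side sketch (not part of the original) of how a handler like the one above might be called. It assumes the resource is routed at /api/v1/upload/<category>/<name>, as the api_links built in the code suggest; the base URL and form field names are illustrative only.

import requests

def upload_resources(base_url, category, paths):
    # Use the reserved name 'multiple' so each uploaded file keeps its own filename.
    url = '{0}/api/v1/upload/{1}/multiple'.format(base_url, category)
    # One distinct form field per file, since the handler reads a single file per key.
    files = dict(('file%d' % i, open(p, 'rb')) for i, p in enumerate(paths))
    resp = requests.post(url, files=files, data={'link': 'private'})
    resp.raise_for_status()
    return resp.json()  # {'links': [...], 'private_links': [...], 'gcs_links': [...], 'api_links': [...]}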
Example #6
def saveImageInGCS(image_data):
    # ======================
    # Save file in GCS
    # ======================
    image_data = base64.b64decode(image_data)  #image_data.encode('utf-8')
    bucket_name = os.environ.get('BUCKET_NAME',
                                 app_identity.get_default_gcs_bucket_name())

    bucket = '/' + bucket_name
    filename = bucket + '/' + getImageHash(image_data) + '.png'
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    gcs_file = gcs.open(filename,
                        'w',
                        content_type='image/png',
                        options={
                            'x-goog-meta-foo': 'foo',
                            'x-goog-meta-bar': 'bar'
                        },
                        retry_params=write_retry_params)
    gcs_file.write(image_data)
    gcs_file.close()

    gcs_object_name = '/gs' + filename
    # logging.info(gcs_object_name)
    blob_key = blobstore.create_gs_key(gcs_object_name)
    image_url = images.get_serving_url(blob_key)
    return image_url
Example #7
def admin_panel_items_add():
    form = ItemUploadForm(request.form)
    form.name.data = get_from_request(request, 'name')
    form.description.data = get_from_request(request, 'description')

    categories = ItemCategory.all()

    choices = []
    for i in range(1, len(categories)):
        choices.append((i, categories[i].name()))
    form.category_id.choices = choices

    if form.validate_on_submit():
        item = Item()
        form.populate_obj(item)
        db.session.add(item)
        db.session.commit()
        item_image = request.files.get('image')
        if not item_image:
            flash("An item image is required!")
            return render_template('admin/add.html', form=form)
        filename = app.config['IMAGE_BUCKET_NAME'] + "/items/" + item.canon_name + ".png"
        write_retry_params = gcs.RetryParams(backoff_factor=1.1)
        gcs_file = gcs.open(filename,
                          'w',
                          content_type='image/png',
                          options={'x-goog-acl': 'public-read'},
                          retry_params=write_retry_params)
        gcs_file.write(item_image.read())
        gcs_file.close()
        flash('You have successfully created a new item!')
        return redirect(url_for('items.admin_add'))
    return render_template('admin/add.html', form=form)
Example #8
    def create_file(self, filename):
        """Create a file.

        The retry_params specified in the open call will override the default
        retry params for this particular file handle.

        Args:
          filename: filename.
        """
        self.response.write('Creating file %s\n' % filename)

        write_retry_params = gcs.RetryParams(backoff_factor=1.1)
        gcs_file = gcs.open(
            filename,
            'w',
            content_type='text/plain',
            # this metadata can be retrieved using cloudstorage.stat
            options={
                'x-goog-meta-foo': 'foo',
                'x-goog-meta-bar': 'bar'
            },
            retry_params=write_retry_params)
        gcs_file.write('abcde\n')
        gcs_file.write('f' * 1024 * 4 + '\n')
        gcs_file.close()
        self.tmp_filenames_to_clean_up.append(filename)
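The inline comment above notes that the x-goog-meta-* options can be read back with cloudstorage.stat. A small sketch (assumed, not from the original) of that lookup:

import cloudstorage as gcs

def read_file_metadata(filename):
    # stat() returns a GCSFileStat; the custom x-goog-meta-* headers land in .metadata
    stat = gcs.stat(filename)
    return stat.content_type, stat.metadata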
Example #9
    def receive(self, mail_message):
        logging.info("Received a message from: " + mail_message.sender)
        logging.info("With subject: " + mail_message.subject)
# [END log_sender_handler]
# [START bodies]
        plaintext_bodies = mail_message.bodies('text/plain')
        html_bodies = mail_message.bodies('text/html')

        for content_type, body in html_bodies:
            decoded_html = body.decode()
            # ...
# [END bodies]
            logging.info("Html body of length %d.", len(decoded_html))
        for content_type, body in plaintext_bodies:
            plaintext = body.decode()
            logging.info("Plain text body of length %d.", len(plaintext))
# [START attachments]
        bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        logging.info("Attachments will be written to bucket " + bucket_name)
        for attach in mail_message.attachments:
            filename = attach[0]
            contents = attach[1]
            logging.info("Attachmend found: " + filename)
            # [START write to the bucket]
            write_retry_params = gcs.RetryParams(backoff_factor=1.1)
            gcs_file = gcs.open("/" + bucket_name + "/" + filename, 'w', content_type='text/plain', options={'x-goog-meta-foo': 'foo','x-goog-meta-bar': 'bar'}, retry_params=write_retry_params)
            gcs_file.write(contents.decode().encode('utf-8'))
            gcs_file.close()                     
Example #10
def create_file():
    # check if the post request has the file part
    if 'file' not in request.files:
        return jsonify({'result': 'no file part'})
    file = request.files['file']
    # if the user does not select a file, the browser may also
    # submit an empty part without a filename
    if file.filename == '':
        return jsonify({'result': 'no selected file'})
    if file and allowed_file(file.filename):
        bucket_name = os.environ.get('BUCKET_NAME',
                                     'test-push-172208.appspot.com')
        bucket = '/' + bucket_name
        filename = secure_filename(file.filename)
        filePath = bucket + '/' + filename
        # googlePath = 'https://storage.googleapis.com/' + filePath
        googlePath = 'https://developers.google.com/maps/documentation/javascript/examples/kml/westcampus.kml'
        try:
            write_retry_params = gcs.RetryParams(backoff_factor=1.1)
            gcs_file = gcs.open(filePath,
                                'w',
                                content_type='text/plain',
                                options={
                                    'x-goog-meta-foo': 'foo',
                                    'x-goog-meta-bar': 'bar'
                                },
                                retry_params=write_retry_params)
            gcs_file.write(file.read())
            gcs_file.close()
            return jsonify({'result': 'ok'})

        except Exception, e:
            logging.exception(e)
            return jsonify({'result': 'not too good'})
Example #11
    def get(self):
        mymidnight = datetime.combine(datetime.today() - timedelta(hours=timezone), dtime(6, 0, 0, 0))
        mymidnight_timestamp = (mymidnight - datetime(1970, 1, 1)).total_seconds()
        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        bucket = '/' + bucket_name + '/Users'
        index_file = cloudstorage.open(bucket + '/current_index')
        index = int(index_file.readline())
        for x in range(0, index + 1):
            #filename = '/withingsapp.appspot.com/Tokens/' + str(x)
            filename = bucket + '/' + str(x) + '/' + 'token'
            if self.FileExists(bucket + '/' + str(x), 'token'):
                endtime = int(time.time())
                starttime = mymidnight_timestamp  # endtime - 86400
                with cloudstorage.open(filename) as cloudstorage_file:
                    refresh_token = cloudstorage_file.readline()
                    access_token = cloudstorage_file.readline()
                url = GET_MEASURE_V2 + 'action=getintradayactivity&access_token='
                url = url + access_token
                url = url + '&startdate=' + str(starttime) + '&enddate=' + str(endtime)
                activity_req = urllib2.urlopen(url)
                activity_read = activity_req.read()
                #filename = '/withingsapp.appspot.com/Activity/Raw/' + str(x)
                filename = bucket + '/' + str(x) + '/Dailyrecords/' + str((datetime.today() - timedelta(hours=timezone)).strftime('%Y%m%d')) + '/activity.json'
                write_retry_params = cloudstorage.RetryParams(backoff_factor=1.1)
                with cloudstorage.open(
                        filename, 'w', content_type='text/plain', options={
                            'x-goog-meta-foo': 'foo', 'x-goog-meta-bar': 'bar'},
                        retry_params=write_retry_params) as cloudstorage_file:
                    cloudstorage_file.write(activity_read)
Example #12
def editormd_image_upload():
    mimetypes.init()
    if 'editormd-image-file' not in request.files:
        return jsonify({"success": 0, "message": u"No file part"})
    file = request.files['editormd-image-file']
    if file.filename == '':
        return jsonify({"success": 0, "message": u"No selected file"})
    if file and allowed_file(file.filename):
        directory = "upload/{0}".format(datetime.now().strftime("%Y%m%d/%H"))
        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        bucket = '/' + bucket_name
        filename = "{0}/{3}/{1}.{2}".format(bucket, slugify(file.filename.rsplit('.', 1)[0]).replace("-", "_"), file.filename.rsplit('.', 1)[1], directory)
        content_type = mimetypes.guess_type(filename)[0] or "application/octet-stream"
        write_retry_params = cloudstorage.RetryParams(backoff_factor=1.1)
        gcs_file = cloudstorage.open(filename,
                                     'w',
                                     content_type=content_type,
                                     options={'x-goog-acl': 'public-read'},
                                     retry_params=write_retry_params)
        gcs_file.write(file.read())
        gcs_file.close()
        gs = "/gs{0}".format(filename)
        blob_key = blobstore.create_gs_key(gs)
        url = images.get_serving_url(blob_key, size=app.config["SITE_POST_IMG_WIDTH"], crop=False, secure_url=True)
        return jsonify({"success": 1, "message": u"No allowed_file", "url": url})

    return jsonify({"success": 0, "message": u"No allowed_file"})
Example #13
def set_last_end_time(project_id, bucket_name, end_time_str, offset):
    """ Write the end_time as a string value in a JSON object in GCS. 
        This file is used to remember the last end_time in case one isn't provided
    """
    # get the datetime object
    end_time = datetime.strptime(end_time_str, '%Y-%m-%dT%H:%M:%S.%fZ')
    delta = timedelta(seconds=offset)
    # Add offset seconds & convert back to str
    end_time_calc = end_time + delta
    end_time_calc_str = end_time_calc.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    file_name = '{}.{}'.format(project_id, config.LAST_END_TIME_FILENAME)

    logging.debug("set_last_end_time - end_time_str: {}, end_time_Calc_str: {}".format(
            end_time_str, end_time_calc_str)
    )
    end_time_str_json = {
        "end_time": end_time_calc_str
    }
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    gcs_file = gcs.open('/{}/{}'.format(bucket_name, file_name),
                        'w',
                        content_type='text/plain',
                        retry_params=write_retry_params)
    gcs_file.write(json.dumps(end_time_str_json))
    gcs_file.close()

    return end_time_calc_str
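The original only shows the writer; here is a minimal companion sketch (not from the source) of how the stored value might be read back, assuming the same bucket, the same file-naming convention, and the same config module being in scope.

import json
import cloudstorage as gcs

def get_last_end_time(project_id, bucket_name):
    # Mirror of set_last_end_time(): read back the JSON object written above.
    file_name = '{}.{}'.format(project_id, config.LAST_END_TIME_FILENAME)
    try:
        gcs_file = gcs.open('/{}/{}'.format(bucket_name, file_name))
        payload = json.loads(gcs_file.read())
        gcs_file.close()
    except gcs.NotFoundError:
        return None  # nothing stored yet
    return payload.get('end_time')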
Example #14
def log_agent(agent):
    filename = _agent_filename()
    logging.info('Logging agent to: ' + filename)
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    with gcs.open(filename, 'w', content_type='text/plain', retry_params=write_retry_params) as cloudstorage_file:
        cloudstorage_file.write(agent.text)
    logging.info('agent log completed')
Example #15
def download_export_chunk(idx, force):
    if idx > 10:
        return
    headers = {
        'Range': 'bytes=%d-%d' % (idx * CHUNK_SIZE, (idx + 1) * CHUNK_SIZE - 1)
    }
    fetch_result = urlfetch.fetch(
        'https://www.worldcubeassociation.org/results/misc/WCA_export.tsv.zip',
        headers=headers)

    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    filename = fname(idx)
    gcs_file = gcs.open(filename,
                        'w',
                        content_type='application/octet-stream',
                        retry_params=write_retry_params)
    gcs_file.write(fetch_result.content)
    gcs_file.close()

    logging.info('Fetched %d bytes' % len(fetch_result.content))

    if len(fetch_result.content) < CHUNK_SIZE:
        deferred.defer(assemble_zip, idx + 1, force)
    else:
        deferred.defer(download_export_chunk, idx + 1, force)
Example #16
def ExportRatingsReduce(user_id, values):
  filename = '/' + '/'.join([config.GetBucketName(), 'export', str(user_id)])
  write_retry_params = gcs.RetryParams(backoff_factor=1.1)
  output = gcs.open(
      filename, 'w', content_type='text/csv', retry_params=write_retry_params)
  writer = csv.writer(output, doublequote=False, escapechar='\\')
  writer.writerow(HEADER_NAMES)
  for value in values:
    url, rating, date, category_name, title = pickle.loads(value)
    date_string = date.strftime('%Y-%m-%d-%H%M%S')
    writer.writerow([
        date_string,
        unicode(url).encode('utf-8'),
        str(rating),
        unicode(category_name).encode('utf-8'),
        unicode(title).encode('utf-8')
    ])
  output.close()
  ExportRatingsResult(
      key=ndb.Key(ExportRatingsResult, user_id),
      in_progress=False,
      filename=filename,
      date=datetime.now(),
      download_key=os.urandom(32).encode('hex')).put()
  # Clean up the history dump after two days so that we don't have old
  # recommendations around (in case the user deletes their previous
  # recommendations).
  deferred.defer(
      _CleanUpOldExportResult,
      user_id,
      datetime.now(),
      _countdown=_EXPORT_RESULT_TTL.total_seconds())
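The closing comment defers a cleanup task, but _CleanUpOldExportResult itself is not shown. Below is a purely hypothetical sketch of what such a task could look like, based only on the call signature used above and reusing the names already in scope in this snippet.

def _CleanUpOldExportResult(user_id, export_date):
  # Hypothetical sketch; the real implementation is not part of this snippet.
  result = ndb.Key(ExportRatingsResult, user_id).get()
  if result and not result.in_progress and result.date <= export_date:
    # The export has not been superseded by a newer run: drop the dump and the record.
    try:
      gcs.delete(result.filename)
    except gcs.NotFoundError:
      pass
    result.key.delete()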
Example #17
def create_file(filename, content):
    """Create a file.

    The retry_params specified in the open call will override the default
    retry params for this particular file handle.

    Args:
      filename: the name of the GCS object to create.
      content: the data to write into the object.
    """
    logging.info('filename' + filename)
    import cloudstorage as gcs
    from google.appengine.api import app_identity

    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    gcs_file = gcs.open(filename,
                        'w',
                        content_type='text/plain',
                        options={
                            'x-goog-meta-foo': 'foo',
                            'x-goog-meta-bar': 'bar',
                            'x-goog-acl': 'public-read'
                        },
                        retry_params=write_retry_params)
    gcs_file.write(content)
    gcs_file.close()
Example #18
def upload():
    files = request.files['file']

    if files:
        filename = secure_filename(files.filename)
        mime_type = files.content_type

        if not allowed_file(files.filename):
            result = uploadfile(name=filename,
                                type=mime_type,
                                not_allowed_msg="File type not allowed")

        else:
            bucket_filename = BUCKET + secure_filename(filename)
            gcs_file = gcs.open(bucket_filename,
                                'w',
                                content_type='image/jpeg',
                                retry_params=gcs.RetryParams(
                                    initial_delay=0.2,
                                    max_delay=5.0,
                                    backoff_factor=2,
                                    max_retry_period=15))
            gcs_file.write(files.read())
            gcs_file.close()
            print bucket_filename

            # return json for js call back
            result = uploadfile(name=filename, type=mime_type)

        return simplejson.dumps({"files": [result.get_file()]})
Example #19
def write_file(filename, contents, content_type='text/plain'):
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    with gcs.open(filename,
                  'w',
                  content_type=content_type,
                  retry_params=write_retry_params) as gcs_file:
        gcs_file.write(contents)
Example #20
class ProxyPage(webapp2.RequestHandler):
    BLOCK_SIZE = 1024 * 1024  # 1MB at a time.
    RETRY_PARAMS = gcs.RetryParams(max_retries=3)

    @google_login_required
    def get(self, bucket, obj):
        gcs_path = '/%s/%s' % (bucket, urllib.quote(obj))
        logging.info('Opening %s using BLOCK_SIZE=%d', gcs_path,
                     self.BLOCK_SIZE)
        try:
            gcs_file = gcs.open(gcs_path, retry_params=self.RETRY_PARAMS)
            gcs_stat = gcs.stat(gcs_path, retry_params=self.RETRY_PARAMS)
        except gcs.ForbiddenError:
            logging.exception("ForbiddenError accessing path %s", gcs_path)
            self.abort(httplib.FORBIDDEN)
        except gcs.AuthorizationError:
            logging.exception("AuthorizationError accessing path %s", gcs_path)
            self.abort(httplib.UNAUTHORIZED)

        self.response.headers["Content-Type"] = gcs_stat.content_type

        content_size = 0L
        block_num = 0
        while True:
            block = gcs_file.read(self.BLOCK_SIZE)
            if not block:
                break
            self.response.write(block)
            content_size += len(block)
            block_num += 1
        logging.info("Wrote content from [%s]: %s blocks, %s bytes", gcs_path,
                     block_num, content_size)
Example #21
def publishmenu(menuid):
    menudata = getmenu(menuid)

    tempdata = gettemplate(menudata['Template'])

    menuHTML = render_template(tempdata['TemplateFile'], menu=menudata)

    object_path = '/' + bucket + '/menus/' + menuid + '.html'

    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    with gcs.open(object_path,
                  'w',
                  content_type='text/html',
                  options={
                      'x-goog-acl': 'public-read',
                      'Cache-Control': 'no-cache'
                  },
                  retry_params=write_retry_params) as menu_file:
        menu_file.write(str(menuHTML))

    menu_link = 'https://storage.googleapis.com/'+bucket+'/menus/'\
            +menuid+'.html'

    return menu_link
Example #22
def post():
    form = PhotoForm(CombinedMultiDict((request.files, request.form)))
    if request.method == 'POST' and form.validate():
        filename = '%s.%s' % (str(uuid.uuid4()),
                              secure_filename(form.input_photo.data.filename))
        content_type = content_types[filename.split('.')[-1]]
        write_retry_params = gcs.RetryParams(backoff_factor=1.1)
        gcs_file = gcs.open('/%s/%s' % (bucket_name, filename), 'w',
                            retry_params=write_retry_params,
                            content_type=content_type,
                            options={'x-goog-acl': 'authenticated-read'})
        for _ in form.input_photo.data.stream:
            gcs_file.write(_)
        gcs_file.close()

        labels = get_labels(filename)
        tags = [translate_text(label.description) for label in labels]
        entity = Photo(id=filename, tags=tags,
                       parent=ndb.Key('User', 'default'))
        entity.put()

        for tag in tags:
            entity = ndb.Key('User', 'default', 'Tags', tag).get()
            if entity:
                entity.count += 1
            else:
                entity = Tags(count=1, id=tag,
                              parent=ndb.Key('User', 'default'))
            entity.put()
        return render_template('post.html', storage_path=storage_path,
                               filename=filename, tags=tags)
    else:
        return redirect(url_for('photos'))
Example #23
def send_result_to_storage(self, data):
    bucket_name = 'urlbucket'
    bucket = '/' + bucket_name
    filename = bucket + '/sentiment_result.json'
    self.tmp_filenames_to_clean_up = []

    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    gcs_file = gcs.open(filename,
                        'w',
                        content_type='text/json',
                        options={'x-goog-meta-foo': 'foo',
                                 'x-goog-meta-bar': 'bar'},
                        retry_params=write_retry_params)
    
    for text in data:
        for sentence in text['sentences']:
            sentence_row = {}
            sentence_row['TEXT'] = sentence['text']['content']
            sentence_row['SCORE'] = sentence['sentiment']['score']
            sentence_row['MAGNITUDE'] = sentence['sentiment']['magnitude']
            sentence_row = json.dumps(sentence_row)
            gcs_file.write(sentence_row + "\n")

    gcs_file.close()
    self.tmp_filenames_to_clean_up.append(filename)
Example #24
def post():
    form = MessageForm(CombinedMultiDict((request.files, request.form)))
    if request.method == 'POST' and form.validate():
        name = request.form['input_name']
        message = request.form['input_message']
        if form.input_photo.data.filename:
            filename = '%s.%s' % (str(
                uuid.uuid4()), secure_filename(form.input_photo.data.filename))
            content_types = {
                'jpg': 'image/jpeg',
                'jpeg': 'image/jpeg',
                'png': 'image/png',
                'gif': 'image/gif'
            }
            content_type = content_types[filename.split('.')[-1]]
            write_retry_params = gcs.RetryParams(backoff_factor=1.1)
            gcs_file = gcs.open('/%s/%s' % (bucket_name, filename),
                                'w',
                                retry_params=write_retry_params,
                                content_type=content_type,
                                options={'x-goog-acl': 'public-read'})
            for _ in form.input_photo.data.stream:
                gcs_file.write(_)
            gcs_file.close()
            entry = Message(name=name, message=message, filename=filename)
        else:
            entry = Message(name=name, message=message, filename=None)
        entry.put()
        return render_template('post.html',
                               name=name,
                               timestamp=entry.timestamp)
    else:
        return redirect(url_for('messages'))
Example #25
def make_blob_public(csv, folder, name=None):
    bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    filename = '/' + bucket_name + '/00_Reports/'+folder+'/'+name+'.csv'
    gcs_file = gcs.open(filename, 'w', content_type='text/csv', retry_params=write_retry_params)
    gcs_file.write(csv)
    gcs_file.close()
Example #26
def upload_file_helper(uploaded_file):
    file_content = uploaded_file.file.read()  # read the file

    # edit the file name
    file_name = str(uploaded_file.filename).replace(" ", "-").replace(".", "-")  # remove spaces
    file_name += str(int(time.time()))  # add a timestamp at the end of the file

    # file type
    file_type = uploaded_file.type
    file_name += "." + file_type.split("/")[1]  # if type is image/png, add .png at the end

    # upload the file to Google Cloud Storage
    gcs_file = cloudstorage.open(
        GCS_BUCKET + '/' + file_name,
        'w',
        content_type=file_type,
        retry_params=cloudstorage.RetryParams(backoff_factor=1.1)
    )

    gcs_file.write(file_content)
    gcs_file.close()

    # get the URL
    url = 'http://localhost:8080/_ah/gcs' if is_local() else 'https://storage.googleapis.com'
    url += GCS_BUCKET + '/' + file_name

    # store the URL in the Datastore
    saved_file = UploadedFile(url=url)
    saved_file.put()

    return url
Example #27
def StorageHandler(request, ident):
    if not ident == 'read':
        response = HttpResponse("", content_type='application/json')
    try:
        if request.method == 'GET':
            if (ident == 'list'):
                ans = list_bucket('/' + get_application_id() + '.appspot.com')
                response.write(
                    simplejson.dumps({
                        'error': 0,
                        'all_objects': ans
                    }))
            elif (ident == 'basic'):
                general(response)
            elif (ident == 'read'):
                nombre = request.GET.get('name', None)
                response = read_file(nombre)
            elif (ident == 'guid'):
                response.write(
                    simplejson.dumps({
                        'error': 0,
                        'uid': generarUID()
                    }))
            else:
                response.write(simplejson.dumps({'error': 0}))
        elif request.method == 'POST':
            archivo = request.FILES['file-0']
            uploaded_file_content = archivo.read()
            uploaded_file_filename = archivo.name
            uploaded_file_type = archivo.content_type
            nombreAnterior = request.POST.get('name', None)
            carpeta = request.POST.get('folder', '')
            if nombreAnterior is not None:
                try:
                    gcs.delete(nombreAnterior)
                except:
                    pass
            nombre = '/' + app_identity.get_default_gcs_bucket_name(
            ) + carpeta + '/' + generarUID() + '-' + uploaded_file_filename
            write_retry_params = gcs.RetryParams(backoff_factor=1.1)
            gcs_file = gcs.open(nombre,
                                'w',
                                content_type=uploaded_file_type,
                                options={'x-goog-acl': 'public-read'},
                                retry_params=write_retry_params)
            gcs_file.write(uploaded_file_content)
            gcs_file.close()
            response.write(simplejson.dumps({'error': 0, 'id': nombre}))

    except Exception, e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        response = HttpResponse("", content_type='application/json')
        response.write(
            simplejson.dumps({
                'error':
                1,
                'msg':
                'Error de servidor: ' +
                repr(traceback.format_tb(exc_traceback)) + '->' + str(e)
            }))
    # hand the HttpResponse built above back to Django
    return response
Example #28
    def post(self, id):

        self.response.content_type = "application/json"

        if self._content is None:
            self.response.status = 400
            self.response.out.write(
                utils.createError(
                    400, "Couldn't decode content or invalid content-type"))

        # Trying to access card to see if user is allowed to
        request = self._service.timeline().get(id=id)
        try:
            card = request.execute()
        except HttpError as e:
            self.response.status = e.resp.status
            self.response.out.write(e.content)
            return

        # 2) Insert data into cloud storage
        write_retry_params = gcs.RetryParams(backoff_factor=1.1)
        file_name = str(uuid.uuid4())
        gcs_file = gcs.open(bucket + "/" + file_name,
                            'w',
                            content_type=self._content_type,
                            retry_params=write_retry_params)
        gcs_file.write(self._content)
        gcs_file.close()

        # 3) Update card with attachment info
        if not "attachments" in card:
            card["attachments"] = []

        attachment = {
            "id":
            file_name,
            "contentType":
            self._content_type,
            "contentUrl":
            "%s/upload/mirror/v1/timeline/%s/attachments/%s" %
            (utils.base_url, card["id"], file_name),
            "isProcessing":
            False
        }

        card["attachments"].append(attachment)

        request = self._service.internal().timeline().update(id=card["id"],
                                                             body=card)

        try:
            result = request.execute()
        except HttpError as e:
            self.response.status = e.resp.status
            self.response.out.write(e.content)
            return

        self.response.status = 200
        self.response.out.write(json.dumps(result))
Example #29
def _make_retry_params():
  """RetryParams structure configured to store access token in Datastore."""
  # Note that 'cloudstorage.set_default_retry_params' function stores retry
  # params in per-request thread local storage, which means it needs to be
  # called for each request. Since we are wrapping all cloudstorage library
  # calls anyway, it's more convenient just to pass RetryParams explicitly,
  # instead of making it a default for request with 'set_default_retry_params'.
  return cloudstorage.RetryParams(save_access_token=True)
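A brief sketch (not from the original) of the wrapping pattern the comment describes, in which every cloudstorage call is handed the RetryParams explicitly instead of relying on a per-request default:

import cloudstorage


def open_for_write(filename, content_type='application/octet-stream'):
  """Assumed helper: open a GCS object for writing with explicit retry params."""
  return cloudstorage.open(
      filename, 'w',
      content_type=content_type,
      retry_params=_make_retry_params())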
Example #30
    def testRetryParams(self):
        retry_params = cloudstorage.RetryParams(max_retries=0)
        cloudstorage.set_default_retry_params(retry_params)

        retry_params.max_retries = 1000
        with cloudstorage.open(TESTFILE, 'w') as f:
            self.assertEqual(0, f._api.retry_params.max_retries)

        with cloudstorage.open(TESTFILE, 'w') as f:
            cloudstorage.set_default_retry_params(retry_params)
            self.assertEqual(0, f._api.retry_params.max_retries)

        per_call_retry_params = cloudstorage.RetryParams()
        with cloudstorage.open(TESTFILE,
                               'w',
                               retry_params=per_call_retry_params) as f:
            self.assertEqual(per_call_retry_params, f._api.retry_params)