Beispiel #1
0
    def post(self):
        """Store the uploaded image in GCS, then create a location report
        from the form fields and redirect to the home page."""
        upload = self.request.POST.get("file")
        file_bytes = upload.file.read()
        file_name = upload.filename
        file_type = upload.type
        bucket_name = 'pick-up-sports-images'

        # This write_retry_params params bit isn't essential, but Google's examples recommend it
        retry = gcs.RetryParams(backoff_factor=1.1)
        out = gcs.open("/" + bucket_name + "/" + file_name,
                       "w",
                       content_type=file_type,
                       retry_params=retry)
        out.write(file_bytes)
        out.close()

        image_url = 'https://storage.googleapis.com' + "/" + bucket_name + "/" + file_name

        location_name = self.request.get('location_name')
        address = self.request.get('address')
        sport_theme_name = self.request.get('sport_theme_name')
        tag_name_list = self.request.get('tags').split()

        # Attach the signed-in user's email when available; anonymous
        # submissions get None.
        current_user = users.get_current_user()
        creator_email = None
        if current_user:
            creator_email = create_or_get_creator(current_user.email()).email

        # create location report
        create_location_report(location_name, address, sport_theme_name,
                               creator_email, tag_name_list, image_url)
        self.redirect('/')
Beispiel #2
0
    def post(self):
        """Persist an uploaded image to GCS and answer with its serving URL
        and datastore id as JSON."""
        img_bytes = self.request.get('image')
        img_name = self.request.get('name')
        mime_type = self.request.get('contentType')

        gcs_path = generate_bucket_filename(img_name)
        with gcs.open(gcs_path, 'w', content_type=mime_type) as out:
            out.write(img_bytes)

        # Bridge GCS -> blobstore so the Images service can serve the file.
        serving_key = blobstore.create_gs_key(generate_blobstore_filename(img_name))
        serving_url = images.get_serving_url(serving_key)

        stored = ImageModel(name=img_name,
                            bucket_name=gcs_path,
                            blob_key=serving_key)
        stored_key = stored.put()

        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps({
            'url': serving_url,
            'name': img_name,
            'mimetype': mime_type,
            'id': stored_key.id()
        }))
Beispiel #3
0
    def initialize_ds_names(local_dir=None, size_limit=None):
        """Load name lists into Naming entities, one per entry in
        NamingGenerator.FILES.

        Args:
            local_dir: when given, read source files from this local
                directory instead of GCS.
            size_limit: optional cap on how many (shuffled) names are
                stored per entity.
        """
        for name, fname in NamingGenerator.FILES.iteritems():
            valid_items = []

            if local_dir is None:
                gcs_file = cloudstorage.open(filename=NamingGenerator.PATH_BASE + fname, mode='r')
            else:
                gcs_file = open(local_dir + fname, mode='r')
            # FIX: the handle was previously never closed (leaked on every
            # call); try/finally guarantees cleanup even if a read fails.
            try:
                for line in gcs_file:
                    line = line.strip()
                    # Keep only single words of <= 8 chars with no interior
                    # hyphen.  A hyphen at index 0 is NOT filtered (find() > 0)
                    # -- preserved from the original logic.
                    if not line or \
                            len(line.split(" ")) > 1 or \
                            line.find("-") > 0 or \
                            len(line) > 8:
                        continue
                    valid_items.append(line.capitalize())
            finally:
                gcs_file.close()

            entry = models.Naming(id=name)
            random.shuffle(valid_items)
            if size_limit is None:
                entry.items = valid_items
            else:
                entry.items = valid_items[0:size_limit]
            entry.put()
  def post(self):
    """Write the posted payload to GCS, then start a Prediction API
    REGRESSION training job whose id is the payload's CRC32."""
    payload = self.request.get('data').encode(constants.UTF8_ENCODING_LABEL)
    # CRC32 (masked to unsigned) doubles as a stable id for both the GCS
    # object and the trained model.
    crc = binascii.crc32(payload) & 0xffffffff
    filename = constants.CLOUD_FILE_PATTERN % (constants.CLOUD_STORAGE_BUCKET,
                                               crc)
    retry = gcs.RetryParams(backoff_factor=1.1)
    with gcs.open(filename,
                  'w',
                  content_type=constants.OUTPUT_CONTENT_TYPE_TEXT,
                  retry_params=retry) as out:
      out.write(payload)

    # Now let's train the model!  The leading '/' is stripped because the
    # API wants "bucket/object", not "/bucket/object".
    result = service.trainedmodels().insert(
        project=constants.PROJECT_ID,
        body={
            'id': str(crc),
            'storageDataLocation': filename[1:],
            'modelType': 'REGRESSION',
        }).execute()

    self.response.headers['Content-Type'] = constants.OUTPUT_CONTENT_TYPE_TEXT
    self.response.write(crc)
    def CreateFile(filename, imageFile):
        """Write imageFile's bytes to GCS at filename as a JPEG and return a
        blobstore key for the stored object.

        Args:
            filename: GCS object path ('/bucket/object').
            imageFile: raw image bytes to store.

        Returns:
            A blobstore key string created from the '/gs'-prefixed path.
        """
        # FIX: removed the explicit f.close() that sat inside the with
        # block -- the context manager already closes the handle, so the
        # extra call was redundant.
        with gcs.open(filename, 'w', content_type='image/jpeg') as f:
            f.write(imageFile)

        blobstore_filename = '/gs' + filename
        return blobstore.create_gs_key(blobstore_filename)
  def get_logs(self, release_id, task_name, log_file='1.log'):
    """Gets the logs for a task from GCS.

    Args:
      release_id: str
      task_name: str
      log_file: str

    Returns:
      Structured log text, or None when no configured bucket yields the log.
    """
    # !!! Note that because of overlapping datetimes and dag_ids, some logs WILL be broken.
    #     This cannot be helped, because of the way which Airflow defines a release !!!
    dag_id, execution_date = release_id_parser(release_id)
    for config in self._configs:  # enumerate index was unused
      dag_run = config.db.query(to_sql_tasks(dag_id, float(time.mktime(execution_date.timetuple()))))
      if len(dag_run):
        execution_date = str(execution_date).replace(' ', 'T')  # put into same format as gcs bucket
        logging.debug('Bucket name: ' + config.bucket_name)
        filename = os.path.join(os.path.sep, config.bucket_name, 'logs', dag_id, task_name, execution_date, log_file)
        logging.info('Retrieving from GCS: ' + str(filename))
        try:
          gcs_file = gcs.open(filename)
          try:
            return gcs_file.read()
          finally:
            gcs_file.close()  # close even if read() fails
        except Exception:
          # FIX: was a bare `except:`; log the failure before trying the
          # next configured bucket instead of silently swallowing it.
          logging.exception('Failed to read %s', filename)
          continue
    return None
Beispiel #7
0
def getFromStorage(bookUrl):
    """Return the full contents of the GCS object at bookUrl."""
    with gcs.open(bookUrl) as book_file:
        return book_file.read()
Beispiel #8
0
 def _write_local_config(self):
     """Writes a local config file to GCS.

     Only runs when NOT on production App Engine (SERVER_SOFTWARE check),
     mirroring self.local_path to the GCS object at self.file_path.
     """
     if not os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'):
         with open(self.local_path, 'r') as stream:
             content = stream.read()
             # NOTE(review): gcs_file is not guarded by with/try-finally; a
             # failed write would leak the handle -- consider a with-block.
             gcs_file = gcs.open(self.file_path, 'w', content_type='text/plain')
             gcs_file.write(content)
             gcs_file.close()
    def read_file(self, filename):
        """Write an abbreviated view of a GCS file to the response: its
        first line followed by its final kilobyte."""
        self.response.write('Abbreviated file content (first line and last 1K):\n')

        handle = gcs.open(filename)
        self.response.write(handle.readline())
        handle.seek(-1024, os.SEEK_END)  # jump to 1 KiB before EOF
        self.response.write(handle.read())
        handle.close()
    def post(self):
        """Upload a report file to GCS and record (or refresh) its GCSFile
        datastore entry.

        Expects a multipart POST with 'file_input' plus 'folderName',
        'userName' and 'userId' form fields; responds with JSON carrying the
        datastore key id on success.
        """
        reportFile = self.request.POST['file_input']
        folder_name = self.request.get('folderName')
        user_name = self.request.get('userName')
        user_id = self.request.get('userId')

        # All four parameters are mandatory.
        if not reportFile.filename or not folder_name or not user_name or not user_id:
            self.response.set_status(400)
            self.render_json({'reason': 'Not all required parameters found', 'status': 'error'})
            return


        bucket_name = os.environ.get('BUCKET_NAME', 'deepspace9-1134.appspot.com')
        bucket = '/' + bucket_name + '/' + folder_name
        filename = bucket + '/' + reportFile.filename

        # Check if the file already exists in google cloud storage
        exists = True
        try:
            gcs.stat(filename)
        except gcs.NotFoundError:
            exists = False

        if exists:
            # find it in the database and get its key
            files = GCSFile.get_by_gcs_file_name(filename)
            if len(files) == 1:
                # Refresh the existing entity's metadata in place.
                db_gcs_file = files[0]
                db_gcs_file.user_id = user_id
                db_gcs_file.user_name = user_name
                db_gcs_file.original_file_name = reportFile.filename
                db_gcs_file.gcs_file_name = filename
                db_gcs_file.timestamp = datetime.now()
                # NOTE(review): logged at error level but this is not an
                # error condition -- presumably leftover debugging output.
                logging.error(db_gcs_file.key.id())
            else:
                self.response.set_status(500)
                self.render_json({'status': 'error', 'key': '', 'reason': 'Unexpected number of files found in the database'})
                return
        else:
            # write a new entry in the DB
            db_gcs_file = GCSFile.save_new(user_id=user_id, user_name=user_name, original_file_name=reportFile.filename,
                                        gcs_file_name=filename)
        # save to database
        db_gcs_file.put()

        # write the file to GCS; user identity travels as x-goog-meta-*
        # custom object metadata.
        write_retry_params = gcs.RetryParams(backoff_factor=1.1)
        gcs_file = gcs.open(filename,
                            'w',
                            content_type=reportFile.type,
                            options={'x-goog-meta-user-name': user_name,
                                     'x-goog-meta-user-id': user_id,
                                     'x-goog-meta-original-name': str(reportFile.filename)},
                            retry_params=write_retry_params)
        gcs_file.write(reportFile.value)
        gcs_file.close()
        # # reply to the app with success and the key
        self.render_json({'status': 'success', 'id': db_gcs_file.key.id()})
Beispiel #11
0
    def _upload_image(self, img_name, img_fh, plaque):
        """
        Upload pic into GCS

        Args:
            img_name: object name appended to the date-based GCS path.
            img_fh: open file handle whose contents become the image bytes.
            plaque: entity whose .pic and .img_url fields are set here.

        Raises:
            SubmitError: when reading/writing the image raises
                AttributeError (e.g. no file was submitted).

        The blobstore.create_gs_key and images.get_serving_url calls are
        outside of the with block; I think this is correct. The
        blobstore.create_gs_key call was erroring out on production when it was
        inside the with block.

        If gcs_fn is specified, overwrite that gcs filename. This is used
        for updating the picture.
        """

        #       Turn this off while Tony Bonomolo is editing:
        #
        #        # Kill old image and URL, if they exist. Tolerate failure in case
        #        # this is a redo:
        #        if plaque.pic is not None:
        #            try:
        #                gcs.delete(plaque.pic)
        #            except:
        #                pass
        #        if plaque.img_url is not None:
        #            try:
        #                images.delete_serving_url(plaque.img_url)
        #            except:
        #                pass

        # Make GCS filename: <bucket>/<YYYYMMDD/HHMMSS>/<img_name>
        date_slash_time = datetime.datetime.now().strftime("%Y%m%d/%H%M%S")
        gcs_filename = '%s/%s/%s' % (GCS_BUCKET, date_slash_time, img_name)
        plaque.pic = gcs_filename

        # TODO openbenches: set plaque.pic to None?
        # TODO openbenches: skip this try block, set plaque.img_url and return immediately?
        # TODO openbenches: set img_url to the hotlink URL?

        # Write image to GCS (world-readable JPEG)
        try:
            ct = 'image/jpeg'
            op = {b'x-goog-acl': b'public-read'}
            with gcs.open(gcs_filename, 'w', content_type=ct,
                          options=op) as fh:
                img_contents = img_fh.read()
                fh.write(img_contents)

            # Make serving_url for image:
            blobstore_gs_key = blobstore.create_gs_key('/gs' + gcs_filename)
            plaque.img_url = images.get_serving_url(blobstore_gs_key)

        except AttributeError:
            submit_err = SubmitError(
                "The image for the plaque was not "
                "specified-- please click the back button "
                "and resubmit.")
            logging.error(submit_err)
            raise submit_err
Beispiel #12
0
    def blob_read(self):
        """ read binary blob from google cloud storage """

        try:
            with gcs.open(self.gcs_filename) as f:
                return f.read()
        except gcs.NotFoundError, e:
            logging.warning('GCS file %s NOT FOUND : %s' % (self.gcs_filename, e))
            return None
Beispiel #13
0
 def get(self, lang='ru'):
     """Serve the GCS-backed dictionary file for *lang*, honouring the
     client's If-None-Match header via the stored gcs_key."""
     import shutil
     entry = ndb.Key(Dictionary, lang).get()
     if not entry:
         self.abort(404)
     gcs_key = entry.gcs_key
     if gcs_key in self.request.if_none_match:
         self.response.status = 304  # client copy is still current
         return
     dictionary_file = gcs.open(get_gcs_filename(gcs_key))
     shutil.copyfileobj(dictionary_file, self.response)
     dictionary_file.close()
     self.response.etag = gcs_key
    def post(self):
        """Upload the posted report file into GCS under
        /<bucket>/<folderName>/ and immediately echo the stored file back to
        the client as an attachment download."""
        reportFile = self.request.POST['file_input']
        folder_name = self.request.get('folderName')
        bucket_name = os.environ.get('BUCKET_NAME',
                                     'deepspace9-1134.appspot.com')

        # self.response.headers['Content-Type'] = 'text/plain'
        # self.response.write('Demo GCS Application running from Version: '
        #                     + os.environ['CURRENT_VERSION_ID'] + '\n')
        # self.response.write('Using bucket name: ' + bucket_name + '\n\n')

        bucket = '/' + bucket_name + '/' + folder_name
        filename = bucket + '/' + reportFile.filename

        """Create a file.

        The retry_params specified in the open call will override the default
        retry params for this particular file handle.

        Args:
          filename: filename.
        """
        # self.response.write('Creating file %s\n' % filename)

        write_retry_params = gcs.RetryParams(backoff_factor=1.1)
        gcs_file = gcs.open(filename,
                            'w',
                            content_type=reportFile.type,
                            options={'x-goog-meta-foo': 'foo',
                                     'x-goog-meta-bar': 'bar'},
                            retry_params=write_retry_params)
        gcs_file.write(reportFile.value)
        gcs_file.close()
        # # echo the file back
        gcs_file = gcs.open(filename)
        self.response.headers['Content-Type'] = reportFile.type
        self.response.headers['Content-Disposition'] = "attachment; filename=" + str(reportFile.filename)
        self.response.write(gcs_file.read())
        gcs_file.close()
Beispiel #15
0
    def get(self, lang="ru"):
        """Stream the language dictionary from GCS, replying 304 when the
        client's cached ETag matches the stored gcs_key."""
        import shutil

        record = ndb.Key(Dictionary, lang).get()
        if not record:
            self.abort(404)
        etag_key = record.gcs_key
        if etag_key in self.request.if_none_match:
            self.response.status = 304  # nothing changed since last fetch
            return
        source = gcs.open(get_gcs_filename(etag_key))
        shutil.copyfileobj(source, self.response)
        source.close()
        self.response.etag = etag_key
Beispiel #16
0
    def post(self):
      """Fetch the image at 'imageURL', apply the requested PIL / App Engine
      Images enhancements, store the result in GCS, and respond with HTML
      that displays and links the processed image."""
      try:
        gurl = self.request.get('imageURL')
        # print("urlColor: " +self.request.get('color'))
        gcolor = float(self.request.get('color'))
        gbrightness = float(self.request.get('brightness'))
        gcontrast = float(self.request.get('contrast'))
        gsharpness = float(self.request.get('sharpness'))
        grotate = int(self.request.get('rotate'))
        # NOTE(review): bool() on a request string is True for ANY non-empty
        # value (including "false") -- confirm the client sends '' to disable.
        gImFLSelected = bool(self.request.get('lucky'))
        c = urlfetch.fetch(gurl, deadline=10).content
        im = Image.open(StringIO(c))
        mimeType = im.format
        try:
          if gImFLSelected:
            # Let the App Engine Images service auto-correct the photo.
            img = images.Image(c)
            img.im_feeling_lucky()
            data = img.execute_transforms(output_encoding=images.JPEG)
          else:
            enh = ImageEnhance.Color(im) # 0 - 2 to be considered
            out = enh.enhance(gcolor)
            enh = ImageEnhance.Brightness(out) # 0 - black image, 1 - original image; Can give more than 1.0
            out = enh.enhance(gbrightness)
            enh = ImageEnhance.Contrast(out) # 0 - solid grey image, 1 - original image
            out = enh.enhance(gcontrast)
            enh = ImageEnhance.Sharpness(out) # 0 - blurred image, 1 - original image, 2 - sharpened image
            out = enh.enhance(gsharpness)
            out = out.rotate(grotate, resample=Image.BICUBIC, expand=True)
            buf = cStringIO.StringIO()
            out.save(buf, mimeType)
            data = buf.getvalue()

          bucket_name = 'dem-ode'
          bucket = '/' + bucket_name

          # Object name is the current epoch time, URL-quoted.
          filename = bucket + '/' + urllib.quote(u"{0}".format(time.time()).encode('utf8'))
          with gcs.open(filename, 'w') as f:
            f.write(data)
          blobstore_filename = "/gs"+filename
          # this is needed if you want to continue using blob_keys.
          ieurl = images.get_serving_url(blobstore.BlobKey(blobstore.create_gs_key(str(blobstore_filename))))
#
          self.response.out.write('<img width="100%" height="100%" src="' + ieurl +'"/>')
          self.response.out.write('<a style="float:right;position:absolute;top:8px;right:8px;width:48px;height:48px" href="' + ieurl + '" download>'+'<img style="width: 48px;height: 48px;" src="https://upload.wikimedia.org/wikipedia/commons/thumb/1/1e/Download-Icon.png/480px-Download-Icon.png"></a>')
        except:
          # NOTE(review): bare except masks every failure as a size problem.
          self.response.headers[b'Content-Type'] = b'text/plain'
          self.response.out.write('Image size is too large. Can\'t handle')
      except:
        # NOTE(review): same bare-except caveat as above.
        self.response.headers[b'Content-Type'] = b'text/plain'
        self.response.out.write('Image size is too large. Can\'t handle')
    def _upload_image(self, img_name, img_fh, plaque):
        """
        Upload pic into GCS

        Args:
            img_name: object name appended to the date-based GCS path.
            img_fh: open file handle whose contents become the image bytes.
            plaque: entity whose .pic and .img_url fields are set here.

        Raises:
            SubmitError: when reading/writing the image raises
                AttributeError (e.g. no file was submitted).

        The blobstore.create_gs_key and images.get_serving_url calls are
        outside of the with block; I think this is correct. The
        blobstore.create_gs_key call was erroring out on production when it was
        inside the with block.

        If gcs_fn is specified, overwrite that gcs filename. This is used
        for updating the picture.
        """

#       Turn this off while Tony Bonomolo is editing:
#
#        # Kill old image and URL, if they exist. Tolerate failure in case
#        # this is a redo:
#        if plaque.pic is not None:
#            try:
#                gcs.delete(plaque.pic)
#            except:
#                pass
#        if plaque.img_url is not None:
#            try:
#                images.delete_serving_url(plaque.img_url)
#            except:
#                pass

        # Make GCS filename: <bucket>/<YYYYMMDD/HHMMSS>/<img_name>
        date_slash_time = datetime.datetime.now().strftime("%Y%m%d/%H%M%S")
        gcs_filename = '%s/%s/%s' % (GCS_BUCKET, date_slash_time, img_name)
        plaque.pic = gcs_filename

        # Write image to GCS; content type and ACL options come from the
        # _gcs_extras helper.
        try:
            ct, op = self._gcs_extras(img_name)
            with gcs.open(gcs_filename, 'w', content_type=ct, options=op) as fh:
                img_contents = img_fh.read()
                fh.write(img_contents)
        except AttributeError:
            submit_err = SubmitError("The image for the plaque was not "
                                     "specified-- please click the back button "
                                     "and resubmit.")
            logging.error(submit_err)
            raise submit_err

        # Make serving_url for image:
        blobstore_gs_key = blobstore.create_gs_key('/gs' + gcs_filename)
        plaque.img_url = images.get_serving_url(blobstore_gs_key)
Beispiel #18
0
def _fetch_image(url_base64, cache_info):
    """Fetch an image, resize it to fit MAX_SIZE, store it in GCS as PNG,
    and cache its serving URL.

    Args:
        url_base64: urlsafe-base64-encoded source URL (also the cache id).
        cache_info: existing ImageCache entity, or None on first fetch.

    Returns:
        The (possibly refreshed) ImageCache entity, or None when the fetch
        failed with a status other than 200/304.
    """
    url = base64.urlsafe_b64decode(url_base64)
    logging.info("image url: %s", url)
    headers = {}
    if cache_info:
        logging.info("image was cached")
        # Conditional fetch: a 304 means our cached copy is still fresh.
        headers['If-Modified-Since'] = cache_info.last_modified

    response = urlfetch.fetch(url, headers=headers)
    logging.info("image fetched, status is %s", response.status_code)
    if response.status_code == 200:  # new or updated image
        logging.info("image is new or modified")
        image_data = response.content

        # resize to fit inside MAX_SIZE x MAX_SIZE, preserving aspect ratio
        image = Image.open(StringIO.StringIO(image_data))
        width, height = image.size
        logging.info("image size is %sx%s", width, height)
        if width > MAX_SIZE or height > MAX_SIZE:
            # FIX: force float division. Under Python 2, MAX_SIZE / width
            # floor-divides to 0 whenever the image is larger than MAX_SIZE,
            # which collapsed every resized image to 0x0.
            ratio = min(float(MAX_SIZE) / width, float(MAX_SIZE) / height)
            new_size = int(width * ratio), int(height * ratio)
            logging.info("resizing to %sx%s", *new_size)
            image = image.resize(new_size, Image.ANTIALIAS)

        # save to GCS
        filename = "/%s/%s" % (_BUCKET, url_base64)
        image_file = cloudstorage.open(filename, "w", "image/png")
        image.save(image_file, "PNG")
        image_file.close()

        # get serving url (forced to http)
        blob_key = blobstore.create_gs_key("/gs" + filename)
        serving_url = images.get_serving_url(blob_key, size=max(image.size))
        if serving_url.startswith('https'):
            serving_url = 'http' + serving_url[5:]

        # save cache info
        cache_info = ImageCache(
            id=url_base64,
            last_modified=response.headers["Last-Modified"],
            serving_url=serving_url)
        cache_info.put()
    elif response.status_code == 304:
        logging.info("image not modified")
        cache_info.put()  # refresh cache_info.updated
    else:
        return None
    return cache_info
Beispiel #19
0
    def blob_write(self, blob):
        """ update google cloud storage bf entity

        Writes *blob* to this entity's GCS object (world-readable) and
        returns the GCS filename; wraps any failure in a generic Exception.
        """

        # Guess the MIME type from the filename extension; may be None.
        content_type = mimetypes.guess_type(self.filename)[0]
        if not content_type:
            logging.warning('Mimetype not guessed for: %s', self.filename)

        # Text-like extensions get an explicit UTF-8 charset suffix.
        if content_type and self.extension in config.UTF_8_FILE_EXTENSIONS:
            content_type += b'; charset=utf-8'
        try:
            with gcs.open(self.gcs_filename, 'w', content_type=content_type or b'binary/octet-stream',
                          options={b'x-goog-acl': b'public-read'}) as f:
                f.write(blob)
            return self.gcs_filename
        except Exception, e:
            # NOTE(review): re-raising as a bare Exception discards the
            # original exception type and traceback.
            raise Exception('Blob write failed for %s, exception: %s. Additional info was logged' % (self.filename, str(e)))
Beispiel #20
0
def blob_archive(new_bf=None):
    """ bonus: save all GCSFile in a zip archive

    Rebuilds a zip in GCS holding every GCSFile blob except the archive
    object itself, tolerating datastore index lag so a just-written new_bf
    is always included.  Returns the archive's GCSFile entity (or new_bf
    unchanged when new_bf IS the archive file).
    """

    @ndb.tasklet
    def callback(bf_key):
        """ key_only query and get() lookup for entity consistency """

        bf = yield bf_key.get_async()
        raise ndb.Return(bf)

    def blobfiles(insert, archive_key):
        """ We do not use ancestor queries. This Generator takes care of index and entity inconsistencies
            https://cloud.google.com/developers/articles/balancing-strong-and-eventual-consistency-with-google-cloud-datastore/
        """

        for bf in GCSFile.query().filter(GCSFile.key != archive_key).map(callback, keys_only=True):
            if insert and new_bf.key == bf.key:
                insert = False  # no index inconsistency
            yield bf

        # if the new_bf entity is not yet present in GCSFile (due to index inconsistencies), it will be inserted here
        if insert:
            yield new_bf

    # add all files to archive, except the archive zipfile itself which has a reserved name (GCSFile key)
    (archive_folder, _, archive_file) = config.ARCHIVE_PATH.rpartition('/')

    if new_bf and new_bf.filename != archive_file:

        new_zf = GCSFile.new(archive_file, folder=archive_folder)
        with gcs.open(new_zf.gcs_filename, 'w', content_type=b'multipart/x-zip',
                      options={b'x-goog-acl': b'public-read', b'cache-control': b'private, max-age=0, no-cache'}) as nzf:

            # nzf is a cloudstorage.storage_api.StreamingBuffer, which can be pickled to append data in a chained task
            with zipfile.ZipFile(nzf, 'w') as zf:
                for each in blobfiles(new_bf is not None, new_zf.key):
                    # We also could have used : each.blob_read()
                    logging.info(each.filename)
                    blob = each.blob_reader().read()
                    zf.writestr(each.filename.encode('utf-8'), blob)

        new_zf.put_async()
    else:
        new_zf = new_bf

    return new_zf
Beispiel #21
0
    def post(self):
        """Resize an uploaded image, store it in the stream's GCS folder,
        register image + tags via the ops helpers, then redirect to the
        single-stream view."""
        unicorn = self.request.get('img')
        img_name = self.request.get('img_name')
        stream_id = self.request.get('stream_id')
        stream_tags = self.request.get('tags')

        # Hashtags only, e.g. "#foo #bar".
        tag_list = re.findall(r'#\w+', stream_tags)

        lon = float(self.request.get('lon'))
        lat = float(self.request.get('lat'))
        img_comment = "comment"

        unicorn = images.resize(unicorn, 500, 500)
        # find the right bucket-stream path
        b = "/pigeonhole-apt.appspot.com/" + str(stream_id)

        # much be post!
        img_real_name = self.request.POST['img'].filename
        # NOTE(review): .group(2) raises AttributeError when the filename has
        # no dot -- confirm uploads always carry an extension.
        pat = "(.+)\.(.+)"
        img_real_type = re.match(pat, str(img_real_name)).group(2)

        # construct a new content_type
        content_type_value = "image/" + str(img_real_type).lower()

        # create such file and write to it
        gcs_file = gcs.open(b + "/" + str(img_name) + "." + str(img_real_type),
                            'w',
                            content_type=content_type_value)
        gcs_file.write(unicorn)
        gcs_file.close()

        # generate the public url
        # not test yet TODO: test the url
        unicorn_url = "https://storage.googleapis.com/pigeonhole-apt.appspot.com/" \
                      + str(stream_id) + "/" + str(img_name) + "." + str(img_real_type).lower()
        # back to ndb server
        ops.create_image(img_comment, img_name, unicorn_url, stream_id, lon,
                         lat)
        ops.update_stream_tag(stream_id, tag_list)

        # redirect the user to the view single page
        self.redirect(
            str("https://pigeonhole-apt.appspot.com/view_single?stream_id=" +
                str(stream_id)))
Beispiel #22
0
 def get(self, photo_key):
         """Apply the stored enhancement settings (ImageData id '123') to the
         blobstore photo at photo_key, save the result to GCS, and respond
         with HTML that shows the processed image."""
         if not blobstore.get(photo_key):
             self.error(404)
         else:
             try:
               blob_info = blobstore.BlobInfo.get(photo_key)
               im = Image.open(blob_info.open())
               # Enhancement parameters come from a fixed ImageData entity.
               iR = ndb.Key(ImageData, '123').get()
               mimeType = im.format
               try:
                 if iR.gImFLSelected:
                   img = images.Image(blob_key = iR.imageKey)
                   img.im_feeling_lucky()
                   data = img.execute_transforms(output_encoding=images.JPEG)
                 else:
                   enh = ImageEnhance.Color(im) # 0 - 2 to be considered
                   out = enh.enhance(iR.gColor)
                   enh = ImageEnhance.Brightness(out) # 0 - black image, 1 - original image; Can give more than 1.0
                   out = enh.enhance(iR.gBrightness)
                   enh = ImageEnhance.Contrast(out) # 0 - solid grey image, 1 - original image
                   out = enh.enhance(iR.gContrast)
                   enh = ImageEnhance.Sharpness(out) # 0 - blurred image, 1 - original image, 2 - sharpened image
                   out = enh.enhance(iR.gSharpness)
                   out = out.rotate(iR.gRotate, resample=Image.BICUBIC, expand=True)
                   buf = cStringIO.StringIO()
                   out.save(buf, mimeType)
                   data = buf.getvalue()
                 bucket_name = 'dem-ode'
                 bucket = '/' + bucket_name
                 # Object name is the current epoch time, URL-quoted.
                 filename = bucket + '/' + urllib.quote(u"{0}".format(time.time()).encode('utf8'))
                 with gcs.open(filename, 'w') as f:
                   f.write(data)
                 blobstore_filename = "/gs"+filename
                 #this is needed if you want to continue using blob_keys.
                 ieurl = images.get_serving_url(blobstore.BlobKey(blobstore.create_gs_key(str(blobstore_filename))))
                 self.response.out.write('<img width="100%" height="100%" src="' + ieurl +'"/>')
                 self.response.out.write('<a style="float:right;position:absolute;top:8px;right:8px;width:48px;height:48px" href="' + ieurl + '" download>'+'<img style="width: 48px;height: 48px;" src="https://upload.wikimedia.org/wikipedia/commons/thumb/1/1e/Download-Icon.png/480px-Download-Icon.png"></a>')
               except:
                     # NOTE(review): bare except reports every failure as a
                     # size problem.
                     self.response.headers[b'Content-Type'] = b'text/plain'
                     self.response.out.write('Image size is too large. Can\'t handle')
             except:
                 # NOTE(review): same bare-except caveat as above.
                 self.response.headers[b'Content-Type'] = b'text/plain'
                 self.response.out.write('Image size is too large. Can\'t handle')
Beispiel #23
0
 def post(self):
     """Regenerate each language dictionary: dump its non-deleted words (with
     a percentile-based difficulty) to a JSON file in GCS, point the
     Dictionary entity at the new file, and delete the previous one."""
     for dictionary in Dictionary.query():
         data_object = []
         key = str(int(time.time()))
         words = GlobalDictionaryWord.query(GlobalDictionaryWord.deleted == False,
                                            GlobalDictionaryWord.lang == dictionary.key.id()).order(GlobalDictionaryWord.E).fetch()
         # FIX: guard against dictionaries with fewer than 100 words --
         # chunk_size used to be len(words) // 100, which is 0 and made the
         # "diff" computation below raise ZeroDivisionError.
         chunk_size = max(1, len(words) // 100)
         for i, word in enumerate(words):
             data_object.append({"word": word.word,
                                 "diff": i // chunk_size,
                                 "used": word.used_times,
                                 "tags": word.tags})
         output_file = gcs.open(get_gcs_filename(key), "w", "application/json")
         json.dump(data_object, output_file)
         output_file.close()
         # Swap in the new file, then clean up the superseded one.
         old_key = dictionary.gcs_key
         dictionary.gcs_key = key
         dictionary.put()
         if old_key:
             gcs.delete(get_gcs_filename(old_key))
Beispiel #24
0
def uploadToStorage(bookName, bookText):
    """Store bookText in GCS as a public, plain-text object.

    The object lives under /<bucket>/books/<timestamp>/<bookName>, so
    repeated uploads of the same title never collide.

    Args:
      bookName: object name for the book.
      bookText: full text to store.

    Returns:
      The GCS path ("/bucket/...") of the newly written object.
    """
    # Prefer an explicit BUCKET_NAME, fall back to the app's default bucket.
    bucket_name = os.environ.get('BUCKET_NAME',
                                 app_identity.get_default_gcs_bucket_name())

    filename = '/%s/books/%s/%s' % (bucket_name, datetime.now(), bookName)

    # Slightly gentler retry backoff than the library default.
    retry = gcs.RetryParams(backoff_factor=1.1)
    gcs_file = gcs.open(filename,
                        'w',
                        content_type='text/plain',
                        retry_params=retry,
                        options={'x-goog-acl': 'public-read'})
    gcs_file.write(bookText)
    gcs_file.close()

    return filename
Beispiel #25
0
    def get(self, _id, img_type):
        self.response.headers['Access-Control-Allow-Origin'] = "*"

        try:
            bucket_name = os.environ.get(
                'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
            if img_type == 'p':
                filename = '/%s/%s/%s' % (bucket_name, FOLDER_PHOTOS, _id)
            if img_type == 'm':
                filename = '/%s/%s/%s' % (bucket_name, FOLDER_MASKS, _id)

            stat = gcs.stat(filename)
            self.response.headers['Content-Type'] = stat.content_type
            self.response.headers['Content-Length'] = stat.st_size
            self.response.headers['etag'] = stat.etag
            gcs_file = gcs.open(filename)
            self.response.write(gcs_file.read())
            gcs_file.close()
        except Exception, e:
            self.response.set_status('400')
            self.response.write(e.message)
Beispiel #26
0
    def post(self):
        """Save the uploaded form file into GCS and render the done page."""
        # Name and raw bytes of the uploaded form field.
        upload = self.request.POST["file_data"]
        file_data = self.request.get("file_data")

        # Store under /<default bucket>/file_save/<original name>.
        bucket_name = app_identity.get_default_gcs_bucket_name()
        filepath = "/%s/file_save/%s" % (bucket_name, upload.filename)

        gcs_file = gcs.open(filepath, "w")
        gcs_file.write(file_data)
        gcs_file.close()

        # Blobstore key so the object can keep being used via blob APIs.
        gcs_key = blobstore.create_gs_key("/gs" + filepath)

        self.set_template_value("message", gcs_key)
        self.draw_template("front/file_save/done.html")
Beispiel #27
0
    def post(self):
        """Persist an uploaded file to GCS, then show the completion page."""
        # Pull the uploaded file's name and its raw contents from the form.
        file_name = self.request.POST['file_data'].filename
        payload = self.request.get('file_data')

        # Destination path inside this app's default GCS bucket.
        bucket_name = app_identity.get_default_gcs_bucket_name()
        target_path = '/' + bucket_name + '/file_save/' + file_name

        out = gcs.open(target_path, 'w')
        out.write(payload)
        out.close()

        # Derive a blobstore key for the freshly stored object.
        gcs_key = blobstore.create_gs_key('/gs' + target_path)

        self.set_template_value('message', gcs_key)
        self.draw_template('front/file_save/done.html')
Beispiel #28
0
    def post (self):
        """Task-queue worker: copy photos from Facebook albums into GCS.

        Reads 'user', 'album' (datastore Album key id) and 'fb_albums'
        (JSON list of Facebook album objects) from the task payload,
        downloads every photo, stores it in GCS, and attaches the
        resulting Photo records to the Album entity.
        """
        user_id = self.request.get ('user')
        print 'task queue recevied user id: %s' % user_id

        album_key = self.request.get ('album')
        fb_albums = json.loads (self.request.get ('fb_albums'))
        images = []

        user = User.get_user_by_id (user_id)
        album = ndb.Key (Album, int(album_key), parent=user.key).get ()

        # Graph API client authorized with this user's access token.
        graph = facebook.GraphAPI (user.access_token)

        # NOTE(review): albums_count is computed but never used.
        albums_count = len(fb_albums)

        for fb_album in fb_albums:
            images_list = graph.get_object ("%s/photos" % fb_album["id"])

            if images_list:
                for image in images_list["data"]:
                    # Download the first rendition's bytes into memory.
                    imgdata = cStringIO.StringIO(urllib2.urlopen(image["images"][0]["source"]).read ())

                    filename = "/photos/%s_photo" % image["id"]
                    with gcs.open (filename, 'w') as photo:
                        photo.write (imgdata.getvalue ())

                    # Blobstore key so the GCS object can be served as a blob.
                    blobkey = blobstore.create_gs_key ("/gs%s" % filename)

                    images.append (
                        Photo (
                            id=image["id"],
                            payload = blobstore.BlobKey (blobkey),
                            width=image["images"][0]["width"],
                            height=image["images"][0]["height"]
                        )
                    )

        # Replace the album's photo list wholesale and persist it.
        album.images = images
        album.put ()
 def get(self):
     """Stream a stored GCS file to the client as an attachment.

     Looks up the GCSFile record named by the numeric 'fileId' query
     parameter; responds 404 when the record or the underlying GCS
     object is missing. Does nothing when 'fileId' is absent or
     non-numeric.
     """
     file_id = self.request.get('fileId')
     if not file_id or not file_id.isdigit():
         return

     # Resolve the GCS object name from the datastore record.
     db_gcs_file = GCSFile.get(int(file_id))
     if not db_gcs_file:
         self.response.set_status(404)
         self.write('404: This file does not exist')
         return

     # Confirm the object still exists in GCS before serving it.
     try:
         stat = gcs.stat(db_gcs_file.gcs_file_name)
     except gcs.NotFoundError:
         self.response.set_status(404)
         self.write('404: This file does not exist')
         return

     gcs_file = gcs.open(db_gcs_file.gcs_file_name)
     self.response.headers['Content-Type'] = stat.content_type
     # Original upload name was stashed in custom object metadata.
     self.response.headers['Content-Disposition'] = "attachment; filename=" + stat.metadata[
         'x-goog-meta-original-name']
     self.response.write(gcs_file.read())
     gcs_file.close()
    def create_file(self, filename):
        """Create a demo file in GCS.

        The retry_params passed to gcs.open override the module-level
        defaults for this one handle only.

        Args:
          filename: full GCS path of the object to create.
        """
        self.response.write('Creating file %s\n' % filename)

        retry = gcs.RetryParams(backoff_factor=1.1)
        handle = gcs.open(filename,
                          'w',
                          content_type='text/plain',
                          options={'x-goog-meta-foo': 'foo',
                                   'x-goog-meta-bar': 'bar'},
                          retry_params=retry)
        handle.write('abcde\n')
        # 4 KiB payload followed by a newline.
        handle.write('f' * 1024 * 4 + '\n')
        handle.close()
        # Remember the object so the demo can clean it up later.
        self.tmp_filenames_to_clean_up.append(filename)
Beispiel #31
0
    def _upload_to_gcs(self, bucket_url, file_data, check_box):
        """Write file_data to GCS at bucket_url.

        The retry_params given to gcs.open override the default retry
        params for this particular file handle.

        Args:
          bucket_url: filename on GCS.
          file_data: raw file data.
          check_box: boolean showing status of the checkbox in the form.
        """
        self.response.write('Creating file %s\n' % bucket_url)
        retry = gcs.RetryParams(backoff_factor=1.1)
        # Public-read ACL; the meta header records the form checkbox state.
        gcs_file = gcs.open(bucket_url,
                            'w',
                            options={
                                'x-goog-acl': 'public-read',
                                'x-goog-meta-public': str(check_box)
                            },
                            retry_params=retry)
        gcs_file.write(file_data)
        gcs_file.close()
Beispiel #32
0
 def post(self):
     """Rebuild each language's word dump stored in GCS.

     Serializes every non-deleted word (with a difficulty bucket derived
     from its rank) into a new timestamped JSON object, repoints the
     dictionary at it, and deletes the previous dump.
     """
     for dictionary in Dictionary.query():
         data_object = []
         # Timestamp doubles as the GCS object key.
         key = str(int(time.time()))
         words = (
             GlobalDictionaryWord.query(
                 GlobalDictionaryWord.deleted == False, GlobalDictionaryWord.lang == dictionary.key.id()
             )
             .order(GlobalDictionaryWord.E)
             .fetch()
         )
         # Bug fix: with fewer than 100 words the old `len(words) // 100`
         # was 0 and `i // chunk_size` raised ZeroDivisionError.
         chunk_size = max(1, len(words) // 100)
         for i, word in enumerate(words):
             data_object.append(
                 {"word": word.word, "diff": i // chunk_size, "used": word.used_times, "tags": word.tags}
             )
         output_file = gcs.open(get_gcs_filename(key), "w", "application/json")
         json.dump(data_object, output_file)
         output_file.close()
         old_key = dictionary.gcs_key
         dictionary.gcs_key = key
         dictionary.put()
         if old_key:
             # Remove the superseded dump only after the switch succeeded.
             gcs.delete(get_gcs_filename(old_key))
Beispiel #33
0
 def get_tree(self, request):
     """Return the stored tree.json from GCS wrapped in a TreeMessage.

     The content is unicode-escaped so it survives transport as ASCII.
     """
     path = '/' + bucket_name + '/tree.json'
     handle = gcs.open(path)
     raw = handle.read()
     handle.close()
     return TreeMessage(tree=raw.decode('utf-8').encode('unicode-escape'))
Beispiel #34
0
    def post(self):
        try:
            #image_file = self.request.get('image', default_value=None)
            data = json.loads(self.request.body)
            ext = data.get('type')

            # mime type must be present and must be png or gif
            if ext is None or ext not in ['png', 'gif']:
                raise Exception('invalid format')

            # image must be present
            if data.get('image', None) is None:
                raise Exception('missing image')

            image = base64.b64decode(
                data.get('image').replace('data:image/%s;base64,' % ext, ''))
            ip = self.request.remote_addr

            credits = data.get('credits', '')
            tags = data.get('tags', [])
            lang = data.get('lang', 'en')
            audience = int(data.get('audience', 1))
            is_safe = True if audience <= 2 else False
            email = data.get('email', None)

            # flip image horizontally
            # image_object = images.Image(image)
            # image_object.horizontal_flip()
            # image = image_object.execute_transforms()

            # caclulate mask id
            mask_id = hashlib.sha1(image).hexdigest()
            print mask_id

            # check if mask exists
            if Mask.get_by_id(mask_id) is not None:
                self.response.headers['Access-Control-Allow-Origin'] = "*"
                self.response.set_status('304')
                return

            # if len(tags) == 0:
            #     raise Exception('missing tags')

            tags_list = []

            # if there are new tags create them
            for tag in tags:
                tag = Tag.get_or_create(tag, source='MASK', lang=lang)
                tags_list.append(str(tag.key.id()))

            # save it on cloud storage
            bucket_name = os.environ.get(
                'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
            filename = '%s.%s' % (mask_id, ext)
            gs_filename = '/%s/%s/%s' % (bucket_name, FOLDER_MASKS, filename)
            gcs_file = gcs.open(gs_filename,
                                'w',
                                content_type='image/%s' % ext,
                                options={
                                    'x-goog-meta-foo': 'foo',
                                    'x-goog-meta-bar': 'bar'
                                })
            gcs_file.write(image)
            gcs_file.close()

            mask = Mask(id=mask_id)
            mask.populate(filename=filename,
                          ip=ip,
                          audience=audience,
                          email=email,
                          tags=tags_list,
                          credits=credits,
                          is_safe=is_safe)
            mask.put()

            # memcache
            memcache.set(mask_id, Mask.to_json_object(mask))

            response_data = {
                "id": mask_id,
                "mask": "/masks/%s" % mask_id,
                "url": "/m/%s" % filename
            }

            self.response.headers['Access-Control-Allow-Origin'] = "*"
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps(response_data))
        except Exception, e:
            self.response.headers['Access-Control-Allow-Origin'] = "*"
            self.response.set_status('400')
            self.response.write(e.message)
Beispiel #35
0
def read_metrics(metric):
  """Return the raw JSON blob stored in GCS for the given metric name."""
  handle = gcs.open(METRICS_DIR + metric + '.json')
  payload = handle.read()
  handle.close()
  return payload
Beispiel #36
0
 def read_file(self, filename):
     """Return (contents, content_type) for a file stored in GCS."""
     handle = gcs.open(filename)
     contents = handle.read()
     handle.close()
     return contents, self.get_content_type(filename)
Beispiel #37
0
def write_metrics(metric, data):
  """Serialize data as JSON and store it in GCS under <metric>.json."""
  handle = gcs.open(METRICS_DIR + metric + '.json', 'w')
  handle.write(json.dumps(data))
  handle.close()
Beispiel #38
0
    def post(self):
        try:
            # get all parameters
            data = json.loads(self.request.body)
            ext = data.get('type')

            # mime type must be present and must be png or gif
            if ext is None or ext not in ['png', 'gif']:
                raise Exception('invalid format')

            # image must be present
            image = data.get('image', None)
            if image is None:
                raise Exception('missing image')

            # parse image
            image = base64.b64decode(
                image.replace('data:image/%s;base64,' % ext, ''))

            # source ip
            ip = self.request.remote_addr

            email = data.get('email', '')
            tags = data.get('tags', [])
            lang = data.get('lang', 'en')
            audience = int(data.get('audience', 1))
            masks = [str(x) for x in data.get('masks', [])]

            # mask must be present
            if len(masks) == 0:
                raise Exception('masks is empty')

            # caclulate photo id
            photo_id = hashlib.sha1(image).hexdigest()
            photo = Photo.get_by_id(photo_id)

            # check if phot exists
            if photo is not None:
                self.response.headers['Access-Control-Allow-Origin'] = "*"
                self.response.set_status('304')
                return

            tags_list = []

            # if there are new tags create them
            for tag in tags:
                tag = Tag.get_or_create(tag, source='USER', lang=lang)
                tags_list.append(tag.key.id())

            # find the other tags in the masks
            for mask_id in masks:
                if mask_id == 0:  # test mask
                    continue
                mask = Mask.get_by_id(mask_id)

                if mask is not None:
                    tags_list.extend(mask.tags)
                    mask.photo_count += 1
                    mask.put()

                    # set the minumum audience to the maximum of the mask audience
                    if mask.audience > audience:
                        audience = mask.audience

            # now understand if it is safe or not
            is_safe = True if int(audience) <= 2 else False

            tags_list = list(set(tags_list))

            # save it on cloud storage
            bucket_name = os.environ.get(
                'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())

            filename = '%s.%s' % (photo_id, ext)
            gs_filename = '/%s/%s/%s' % (bucket_name, FOLDER_PHOTOS, filename)

            gcs_file = gcs.open(gs_filename,
                                'w',
                                content_type='image/%s' % ext,
                                options={
                                    'x-goog-acl': 'public-read',
                                    'x-goog-meta-foo': 'foo'
                                })
            gcs_file.write(image)
            gcs_file.close()
            # create a key for the stored file
            gs_key = blobstore.create_gs_key('/gs' + gs_filename)
            # get the cache url for the image
            serving_url = images.get_serving_url(gs_key)

            photo = Photo(id=photo_id)
            photo.populate(filename=filename,
                           ip=ip,
                           audience=audience,
                           masks=masks,
                           tags=tags_list,
                           email=email,
                           ext=ext,
                           thumb=serving_url,
                           is_safe=is_safe)
            photo.put()

            response_data = {
                "id": photo_id,
                "blowout": "/blowout/%s" % photo_id,
                "image": "/p/%s" % photo.filename,
                "thumb": photo.thumb
            }

            # memcache
            json_photo = Photo.to_json_object(photo)
            memcache.set(photo.key.id(), json_photo)

            self.response.headers['Access-Control-Allow-Origin'] = "*"
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps(response_data))
        except Exception, e:
            logging.info(e)
            self.response.headers['Access-Control-Allow-Origin'] = "*"
            self.response.set_status('400')
            self.response.write(e.message)
Beispiel #39
0
def create_stream_in_storage(stream_name):
    """Create an empty placeholder object for a stream in GCS.

    The object name ends with '/' so it behaves as a folder marker.
    """
    path = "/pigeonhole-apt.appspot.com/" + str(stream_name) + "/"
    handle = gcs.open(path, 'w')
    handle.close()
 def get(self, photo_key):
     """Apply the saved ImageData adjustments to a blobstore photo.

     Loads adjustment settings from the ImageData entity with id '123',
     enhances (or 'I'm feeling lucky'-transforms) the image, stores the
     result in GCS, and writes an <img> preview plus a download link
     pointing at the serving URL. Responds 404 when photo_key is unknown.
     """
     if not blobstore.get(photo_key):
         self.error(404)
     else:
         try:
             blob_info = blobstore.BlobInfo.get(photo_key)
             im = Image.open(blob_info.open())
             # Adjustment settings are kept under a fixed entity id.
             iR = ndb.Key(ImageData, '123').get()
             mimeType = im.format
             try:
                 if iR.gImFLSelected:
                     # Let App Engine's auto-correct pick the transforms.
                     img = images.Image(blob_key=iR.imageKey)
                     img.im_feeling_lucky()
                     data = img.execute_transforms(
                         output_encoding=images.JPEG)
                 else:
                     enh = ImageEnhance.Color(im)  # 0 - 2 to be considered
                     out = enh.enhance(iR.gColor)
                     enh = ImageEnhance.Brightness(
                         out
                     )  # 0 - black image, 1 - original image; Can give more than 1.0
                     out = enh.enhance(iR.gBrightness)
                     enh = ImageEnhance.Contrast(
                         out)  # 0 - solid grey image, 1 - original image
                     out = enh.enhance(iR.gContrast)
                     enh = ImageEnhance.Sharpness(
                         out
                     )  # 0 - blurred image, 1 - original image, 2 - sharpened image
                     out = enh.enhance(iR.gSharpness)
                     out = out.rotate(iR.gRotate,
                                      resample=Image.BICUBIC,
                                      expand=True)
                     buf = cStringIO.StringIO()
                     out.save(buf, mimeType)
                     data = buf.getvalue()
                 bucket_name = 'dem-ode'
                 bucket = '/' + bucket_name
                 # Timestamp-based object name, URL-quoted for safety.
                 filename = bucket + '/' + urllib.quote(u"{0}".format(
                     time.time()).encode('utf8'))
                 with gcs.open(filename, 'w') as f:
                     f.write(data)
                 blobstore_filename = "/gs" + filename
                 # this is needed if you want to continue using blob_keys.
                 ieurl = images.get_serving_url(
                     blobstore.BlobKey(
                         blobstore.create_gs_key(str(blobstore_filename))))
                 self.response.out.write(
                     '<img width="100%" height="100%" src="' + ieurl +
                     '"/>')
                 self.response.out.write(
                     '<a style="float:right;position:absolute;top:8px;right:8px;width:48px;height:48px" href="'
                     + ieurl + '" download>' +
                     '<img style="width: 48px;height: 48px;" src="https://upload.wikimedia.org/wikipedia/commons/thumb/1/1e/Download-Icon.png/480px-Download-Icon.png"></a>'
                 )
             except:
                 # NOTE(review): bare except masks all errors (not only
                 # size limits), so this message can be misleading.
                 self.response.headers[b'Content-Type'] = b'text/plain'
                 self.response.out.write(
                     'Image size is too large. Can\'t handle')
         except:
             # NOTE(review): bare except masks all errors here as well.
             self.response.headers[b'Content-Type'] = b'text/plain'
             self.response.out.write(
                 'Image size is too large. Can\'t handle')
Beispiel #41
0
    def post (self):
        """Task worker: build a 2x2 thumbnail cover for an album.

        Fetches up to four photo thumbnails from the given Facebook
        album, tiles their center-cropped 100x100 versions into a
        200x200 canvas, stores it in GCS and records its blob key as
        the album's cover.
        """
        user_id = self.request.get ('user')
        print 'Create cover for  user id: %s' % user_id

        album_key = self.request.get ('album')
        fb_album = self.request.get ('fb_album')
        images = []

        user = User.get_user_by_id (user_id)
        album = ndb.Key (Album, int(album_key), parent=user.key).get ()

        graph = facebook.GraphAPI (user.access_token)
        pictures = graph.get_object ("%s/photos" % fb_album, fields="picture",
                limit=4)

        if pictures:
            # 200x200 canvas split into four 100x100 quadrants.
            bg = imaging.new ('RGBA', (200, 200))
            position = (
                (0, 0, 100, 100),
                (0, 100, 100, 200),
                (100, 0, 200, 100),
                (100, 100, 200, 200)
            )

            try:
                index = 0
                for picture in pictures["data"]:
                    # Download the thumbnail bytes into memory.
                    data = cStringIO.StringIO(urllib2.urlopen(picture["picture"]).read ())
                    img = imaging.open (data)

                    # Upscale so the image covers the tile...
                    ratio = float (max (img.size)) / float (min (img.size))
                    size = int (img.size[0] * ratio), int (img.size[1] * ratio)
                    oldsize = img.size

                    diff = tuple (x - y for x, y in zip (size, oldsize))

                    img = img.resize (size)
                    # ...then center-crop a 100x100 thumbnail from it.
                    thumbnail = img.crop ((diff[0] / 2, diff[1] / 2, 100
                        + diff[0] / 2, 100 + diff[1] /2 ))

                    padding = [(100 - thumbnail.size[0]) / 2, (100 -
                        thumbnail.size[1]) / 2]

                    bg.paste (thumbnail, (position[index][0] + padding[0],
                        position[index][1] + padding[1], position[index][2] -
                        padding[0], position[index][3] - padding[1]))
                    index += 1

                    # NOTE(review): position has 4 slots; the API limit=4
                    # keeps index in range, so this guard never fires.
                    if index > 4:
                        break

                filename = "/covers/%s_cover" % album.key
                with gcs.open (filename, 'w') as cover:
                    bg.save (cover, 'PNG')

                # Blobstore key lets the cover be served via blob APIs.
                blobkey = blobstore.create_gs_key ("/gs%s" % filename)
                album.cover = blobstore.BlobKey (blobkey)

                album.put ()
            except ValueError as e:
                raise e
    def post(self):
        """Fetch an image by URL, apply requested enhancements, and write
        back an <img> preview plus a download link.

        Form fields: imageURL, color, brightness, contrast, sharpness,
        rotate, lucky.
        """
        try:
            gurl = self.request.get('imageURL')
            # print("urlColor: " +self.request.get('color'))
            gcolor = float(self.request.get('color'))
            gbrightness = float(self.request.get('brightness'))
            gcontrast = float(self.request.get('contrast'))
            gsharpness = float(self.request.get('sharpness'))
            grotate = int(self.request.get('rotate'))
            # NOTE(review): bool() of any non-empty string is True, so
            # lucky='false' still enables the lucky path — confirm intent.
            gImFLSelected = bool(self.request.get('lucky'))
            c = urlfetch.fetch(gurl, deadline=10).content
            im = Image.open(StringIO(c))
            mimeType = im.format
            try:
                if gImFLSelected:
                    # Let App Engine's auto-correct pick the transforms.
                    img = images.Image(c)
                    img.im_feeling_lucky()
                    data = img.execute_transforms(output_encoding=images.JPEG)
                else:
                    enh = ImageEnhance.Color(im)  # 0 - 2 to be considered
                    out = enh.enhance(gcolor)
                    enh = ImageEnhance.Brightness(
                        out
                    )  # 0 - black image, 1 - original image; Can give more than 1.0
                    out = enh.enhance(gbrightness)
                    enh = ImageEnhance.Contrast(
                        out)  # 0 - solid grey image, 1 - original image
                    out = enh.enhance(gcontrast)
                    enh = ImageEnhance.Sharpness(
                        out
                    )  # 0 - blurred image, 1 - original image, 2 - sharpened image
                    out = enh.enhance(gsharpness)
                    out = out.rotate(grotate,
                                     resample=Image.BICUBIC,
                                     expand=True)
                    buf = cStringIO.StringIO()
                    out.save(buf, mimeType)
                    data = buf.getvalue()

                bucket_name = 'dem-ode'
                bucket = '/' + bucket_name

                # Timestamp-based object name, URL-quoted for safety.
                filename = bucket + '/' + urllib.quote(u"{0}".format(
                    time.time()).encode('utf8'))
                with gcs.open(filename, 'w') as f:
                    f.write(data)
                blobstore_filename = "/gs" + filename
                # this is needed if you want to continue using blob_keys.
                ieurl = images.get_serving_url(
                    blobstore.BlobKey(
                        blobstore.create_gs_key(str(blobstore_filename))))
                #
                self.response.out.write(
                    '<img width="100%" height="100%" src="' + ieurl + '"/>')
                self.response.out.write(
                    '<a style="float:right;position:absolute;top:8px;right:8px;width:48px;height:48px" href="'
                    + ieurl + '" download>' +
                    '<img style="width: 48px;height: 48px;" src="https://upload.wikimedia.org/wikipedia/commons/thumb/1/1e/Download-Icon.png/480px-Download-Icon.png"></a>'
                )
            except:
                # NOTE(review): bare except masks all errors (not only
                # size limits), so this message can be misleading.
                self.response.headers[b'Content-Type'] = b'text/plain'
                self.response.out.write(
                    'Image size is too large. Can\'t handle')
        except:
            # NOTE(review): bare except masks all errors here as well.
            self.response.headers[b'Content-Type'] = b'text/plain'
            self.response.out.write('Image size is too large. Can\'t handle')
Beispiel #43
0
def store_thumbnail_in_gcs(self, thumbnail_key, thumbnail):
    """Write a thumbnail image into the thumbnail bucket.

    Args:
      thumbnail_key: object name, appended to THUMBNAIL_BUCKET.
      thumbnail: raw image bytes to store.
    """
    # Bug fix: write_retry_params was created but never passed to
    # gcs.open, so the custom backoff never took effect.
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    filename = '/' + THUMBNAIL_BUCKET + '/' + thumbnail_key
    with gcs.open(filename, 'w', retry_params=write_retry_params) as filehandle:
        filehandle.write(thumbnail)
Beispiel #44
0
 def _load_config(self):
     """Loads the contents of the config file from GCS into self.contents."""
     gcs_file = gcs.open(self.file_path)
     contents = gcs_file.read()
     gcs_file.close()
     # safe_load avoids arbitrary Python object construction from YAML
     # tags; plain configuration data parses identically to yaml.load.
     self.contents = yaml.safe_load(contents)