Example #1
def StoreObjectInGCS(
    content, mime_type, project_id, thumb_width=DEFAULT_THUMB_WIDTH,
    thumb_height=DEFAULT_THUMB_HEIGHT, filename=None):
  bucket_name = app_identity.get_default_gcs_bucket_name()
  guid = uuid.uuid4()
  object_id = '/%s/attachments/%s' % (project_id, guid)
  object_path = '/' + bucket_name + object_id
  options = {}
  if filename:
    if not framework_constants.FILENAME_RE.match(filename):
      logging.info('bad file name: %s' % filename)
      filename = 'attachment.dat'
    options['Content-Disposition'] = 'inline; filename="%s"' % filename
  logging.info('Writing with options %r', options)
  with cloudstorage.open(object_path, 'w', mime_type, options=options) as f:
    f.write(content)

  if mime_type in RESIZABLE_MIME_TYPES:
    # Create and save a thumbnail too.
    thumb_content = None
    try:
      thumb_content = images.resize(content, thumb_width, thumb_height)
    except images.LargeImageError:
      # Don't log the whole exception because we don't need to see
      # this on the Cloud Error Reporting page.
      logging.info('Got LargeImageError on image with %d bytes', len(content))
    except Exception as e:
      # Do not raise exception for incorrectly formed images.
      # See https://bugs.chromium.org/p/monorail/issues/detail?id=597 for more
      # detail.
      logging.exception(e)
    if thumb_content:
      thumb_path = '%s-thumbnail' % object_path
      with cloudstorage.open(thumb_path, 'w', 'image/png') as f:
        f.write(thumb_content)

  # Return the GCS object ID so callers can store a reference to the upload
  # (the accompanying test asserts this return value).
  return object_id
Example #2
    def testStoreObjectInGCS_NotResizableMimeType(self):
        guid = 'aaaaa'
        project_id = 100
        object_id = '/%s/attachments/%s' % (project_id, guid)
        bucket_name = 'test_bucket'
        object_path = '/' + bucket_name + object_id
        mime_type = 'not_resizable_mime_type'
        content = 'content'

        self.mox.StubOutWithMock(app_identity, 'get_default_gcs_bucket_name')
        app_identity.get_default_gcs_bucket_name().AndReturn(bucket_name)

        self.mox.StubOutWithMock(uuid, 'uuid4')
        uuid.uuid4().AndReturn(guid)

        self.mox.StubOutWithMock(cloudstorage, 'open')
        cloudstorage.open(object_path, 'w',
                          mime_type).AndReturn(fake.FakeFile())

        self.mox.ReplayAll()

        ret_id = gcs_helpers.StoreObjectInGCS(content, mime_type, project_id,
                                              gcs_helpers.DEFAULT_THUMB_WIDTH,
                                              gcs_helpers.DEFAULT_THUMB_HEIGHT)
        self.mox.VerifyAll()
        self.assertEquals(object_id, ret_id)
Example #3
  def post(self):
    trace_uuid = str(uuid.uuid4())
    if 'GCS_BUCKET_NAME' not in os.environ:
      bucket_name = app_identity.get_default_gcs_bucket_name()
    else:
      bucket_name = os.environ['GCS_BUCKET_NAME']
    gcs_path = ('/' + bucket_name + '/' + trace_uuid + '.gz')
    gcs_file = gcs.open(gcs_path,
                        'w',
                        content_type='application/octet-stream',
                        options={},
                        retry_params=default_retry_params)
    gcs_file.write(self.request.get('trace'))
    gcs_file.close()

    trace_object = trace_info.TraceInfo(id=trace_uuid)
    trace_object.prod = self.request.get('prod')
    trace_object.network_type = self.request.get('network-type')
    trace_object.remote_addr = os.environ["REMOTE_ADDR"]
    tags_string = self.request.get('tags')
    if tags_string:
      # Tags are comma separated and should only include alphanumeric + '-'.
      if re.match('^[a-zA-Z0-9-,]+$', tags_string):
        trace_object.tags = tags_string.split(',')
      else:
        logging.warning('The provided tags string includes one or more invalid'
                        ' characters and will be ignored')
    trace_object.user_agent = self.request.headers.get('User-Agent')
    trace_object.ver = self.request.get('product-version')
    trace_object.config = self.request.get('config')
    trace_object.put()

    self.response.write(trace_uuid)
Example #4
  def post(self):
    trace_uuid = str(uuid.uuid4())
    if 'GCS_BUCKET_NAME' not in os.environ:
      bucket_name = app_identity.get_default_gcs_bucket_name()
    else:
      bucket_name = os.environ['GCS_BUCKET_NAME']
    gcs_path = ('/' + bucket_name + '/' + trace_uuid + '.gz')
    gcs_file = gcs.open(gcs_path,
                        'w',
                        content_type='application/octet-stream',
                        options={},
                        retry_params=default_retry_params)
    gcs_file.write(self.request.get('trace'))
    gcs_file.close()

    trace_object = trace_info.TraceInfo(id=trace_uuid)
    trace_object.prod = self.request.get('prod')
    trace_object.network_type = self.request.get('network_type')
    trace_object.remote_addr = os.environ["REMOTE_ADDR"]
    tags_string = self.request.get('tags')
    if re.match('^[a-zA-Z0-9,]+$', tags_string): # ignore non alpha-numeric tags
      trace_object.tags = tags_string.split(',')
    trace_object.user_agent = self.request.headers.get('User-Agent')
    trace_object.ver = self.request.get('product_version')
    trace_object.put()

    self.response.write(trace_uuid)
Example #5
def WriteGCS(fullurl, data):
  gcs_file = gcs.open(_remove_gcs_prefix(fullurl),
                      'w',
                      content_type='text/plain',
                      options={},
                      retry_params=default_retry_params)
  gcs_file.write(data)
  gcs_file.close()
Example #6
def WriteGCS(fullurl, data):
    gcs_file = gcs.open(_remove_gcs_prefix(fullurl),
                        'w',
                        content_type='text/plain',
                        options={},
                        retry_params=default_retry_params)
    gcs_file.write(data)
    gcs_file.close()
Example #7
def ReadGCS(fullurl):
  gcs_file = gcs.open(_remove_gcs_prefix(fullurl),
                      'r',
                      retry_params=default_retry_params)

  contents = gcs_file.read()
  gcs_file.close()

  return contents
Example #8
def ReadGCS(fullurl):
    gcs_file = gcs.open(_remove_gcs_prefix(fullurl),
                        'r',
                        retry_params=default_retry_params)

    contents = gcs_file.read()
    gcs_file.close()

    return contents
Example #9
  def post(self):
    self.response.headers['Content-Type'] = 'text/plain'

    jobid = self.request.get('jobid')
    job = job_info.JobInfo.get_by_id(jobid)
    if not job:
      return

    payload = urllib.urlencode({'q': 'MAX_TRACE_HANDLES=10'})
    query_url = '%s/query?%s' % (_PERFORMANCE_INSIGHTS_URL, payload)
    result = urlfetch.fetch(url=query_url,
                            payload=payload,
                            method=urlfetch.GET,
                            follow_redirects=False,
                            deadline=10)
    logging.info(result.content)

    taskid = str(uuid.uuid4())
    traces = json.loads(result.content)

    default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                           max_delay=5.0,
                                           backoff_factor=2,
                                           max_retry_period=15)
    gcs_file = gcs.open(_DEFAULT_BUCKET.format(name=taskid),
                        'w',
                        content_type='text/plain',
                        options={},
                        retry_params=default_retry_params)
    gcs_file.write(json.dumps(traces))
    gcs_file.close()

    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/compute')
    http = credentials.authorize(httplib2.Http(memcache))
    compute = build("compute", "v1", http=http)

    startup_script = _STARTUP_SCRIPT.format(
        revision=job.revision)

    result = self._CreateGCEInstace(
        compute, 'mr-%s' % jobid, startup_script)

    logging.info('Call to instances().insert response:\n')
    for k, v in sorted(result.iteritems()):
      logging.info(' %s: %s' % (k, v))

    job.status = 'COMPLETE'
    job.put()

    response = {'success': False}
    self.response.out.write(json.dumps(response))
Example #10
def MaybeCreateDownload(bucket_name, object_id, filename):
  """If the obj is not huge, and no download version exists, create it."""
  src = '/%s%s' % (bucket_name, object_id)
  dst = '/%s%s-download' % (bucket_name, object_id)
  cloudstorage.validate_file_path(src)
  cloudstorage.validate_file_path(dst)
  logging.info('Maybe create %r from %r', dst, src)

  if IS_DEV_APPSERVER:
    logging.info('dev environment never makes download copies.')
    return False

  # If "Download" object already exists, we are done.
  try:
    cloudstorage.stat(dst)
    logging.info('Download version of attachment already exists')
    return True
  except errors.NotFoundError:
    pass

  # If "View" object is huge, give up.
  src_stat = cloudstorage.stat(src)
  if src_stat.st_size > MAX_ATTACH_SIZE_TO_COPY:
    logging.info('Download version of attachment would be too big')
    return False

  with cloudstorage.open(src, 'r') as infile:
    content = infile.read()
  logging.info('opened GCS object and read %r bytes', len(content))
  content_type = src_stat.content_type
  options = {
    'Content-Disposition': 'attachment; filename="%s"' % filename,
    }
  logging.info('Writing with options %r', options)
  with cloudstorage.open(dst, 'w', content_type, options=options) as outfile:
    outfile.write(content)
  logging.info('done writing')

  return True
Example #11
def StoreObjectInGCS(
    content, mime_type, project_id, thumb_width=DEFAULT_THUMB_WIDTH,
    thumb_height=DEFAULT_THUMB_HEIGHT):
  bucket_name = app_identity.get_default_gcs_bucket_name()
  guid = uuid.uuid4()
  object_id = '/%s/attachments/%s' % (project_id, guid)
  object_path = '/' + bucket_name + object_id
  with cloudstorage.open(object_path, 'w', mime_type) as f:
    f.write(content)

  if mime_type in RESIZABLE_MIME_TYPES:
    # Create and save a thumbnail too.
    thumb_content = None
    try:
      thumb_content = images.resize(content, thumb_width, thumb_height)
    except Exception as e:
      # Do not raise exception for incorrectly formed images.
      # See https://bugs.chromium.org/p/monorail/issues/detail?id=597 for more
      # detail.
      logging.exception(e)
    if thumb_content:
      thumb_path = '%s-thumbnail' % object_path
      with cloudstorage.open(thumb_path, 'w', 'image/png') as f:
        f.write(thumb_content)

  # Return the GCS object ID so callers can store a reference to the upload.
  return object_id
Example #12
    def post(self):
        trace_uuid = str(uuid.uuid4())

        gcs_path = '/%s/%s.gz' % (cloud_config.Get().trace_upload_bucket,
                                  trace_uuid)
        gcs_file = gcs.open(gcs_path,
                            'w',
                            content_type='application/octet-stream',
                            options={},
                            retry_params=default_retry_params)
        gcs_file.write(self.request.get('trace'))
        gcs_file.close()

        trace_object = trace_info.TraceInfo(id=trace_uuid)
        trace_object.remote_addr = os.environ["REMOTE_ADDR"]

        for arg in self.request.arguments():
            arg_key = arg.replace('-', '_').lower()
            if arg_key in trace_object._properties:
                try:
                    setattr(trace_object, arg_key, self.request.get(arg))
                except datastore_errors.BadValueError:
                    pass

        scenario_config = self.request.get('config')
        if scenario_config:
            config_json = json.loads(scenario_config)
            if 'scenario_name' in config_json:
                trace_object.scenario_name = config_json['scenario_name']

        tags_string = self.request.get('tags')
        if tags_string:
            # Tags are comma separated and should only include alphanumeric + '-'.
            if re.match('^[a-zA-Z0-9-,]+$', tags_string):
                trace_object.tags = tags_string.split(',')
            else:
                logging.warning(
                    'The provided tags string includes one or more invalid'
                    ' characters and will be ignored')

        trace_object.ver = self.request.get('product-version')
        trace_object.put()

        self.response.write(trace_uuid)
Example #13
  def post(self):
    trace_uuid = str(uuid.uuid4())

    gcs_path = '/%s/%s.gz' % (
        cloud_config.Get().trace_upload_bucket, trace_uuid)
    gcs_file = gcs.open(gcs_path,
                        'w',
                        content_type='application/octet-stream',
                        options={},
                        retry_params=default_retry_params)
    gcs_file.write(self.request.get('trace'))
    gcs_file.close()

    trace_object = trace_info.TraceInfo(id=trace_uuid)
    trace_object.remote_addr = os.environ["REMOTE_ADDR"]

    for arg in self.request.arguments():
      arg_key = arg.replace('-', '_').lower()
      if arg_key in trace_object._properties:
        try:
          setattr(trace_object, arg_key, self.request.get(arg))
        except datastore_errors.BadValueError:
          pass

    scenario_config = self.request.get('config')
    if scenario_config:
      config_json = json.loads(scenario_config)
      if 'scenario_name' in config_json:
        trace_object.scenario_name = config_json['scenario_name']

    tags_string = self.request.get('tags')
    if tags_string:
      # Tags are comma separated and should only include alphanumeric + '-'.
      if re.match('^[a-zA-Z0-9-,]+$', tags_string):
        trace_object.tags = tags_string.split(',')
      else:
        logging.warning('The provided tags string includes one or more invalid'
                        ' characters and will be ignored')

    trace_object.ver = self.request.get('product-version')
    trace_object.put()

    self.response.write(trace_uuid)
Example #14
def update_annotations_from_csv(filename="anotace.csv"):
    logging.info("Starting to update from CSV")

    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                              max_delay=5.0,
                                              backoff_factor=2,
                                              max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    # bucket_name = os.environ.get('BUCKET_NAME',
    #                              app_identity.get_default_gcs_bucket_name())
    bucket_name = "tisic-knih.appspot.com"
    bucket = '/' + bucket_name
    filename = bucket + '/' + filename
    try:
        if is_dev_server():
            gcs_file = open("anotace.csv")
        else:
            gcs_file = gcs.open(filename)
        # gcs_file.seek(1000)
        r = ucsv.reader(gcs_file, encoding='utf-8')
        r.next()  # Skip first line (header).
        annotation_data = {}

        for row_number, row in enumerate(r):
            if row_number % 10000 == 0:
                logging.info("Processing row number {} of CSV file. "
                             .format(row_number))
            item_id = row[0]
            short_text = row[1]
            long_text = row[2]
            annotation_data[item_id] = (short_text, long_text)
            if len(annotation_data) > MAX_OBJECTS_PER_BATCH:
                deferred.defer(_put_annotations_batch, annotation_data)
                annotation_data = {}

        deferred.defer(_put_annotations_batch, annotation_data)
        gcs_file.close()
    except Exception as e:
        logging.error(e)
        raise deferred.PermanentTaskFailure()
Example #15
def update_annotations_from_csv(filename="anotace.csv"):
    logging.info("Starting to update from CSV")

    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                              max_delay=5.0,
                                              backoff_factor=2,
                                              max_retry_period=15)
    gcs.set_default_retry_params(my_default_retry_params)
    # bucket_name = os.environ.get('BUCKET_NAME',
    #                              app_identity.get_default_gcs_bucket_name())
    bucket_name = "tisic-knih.appspot.com"
    bucket = '/' + bucket_name
    filename = bucket + '/' + filename
    try:
        if is_dev_server():
            gcs_file = open("anotace.csv")
        else:
            gcs_file = gcs.open(filename)
        # gcs_file.seek(1000)
        r = ucsv.reader(gcs_file, encoding='utf-8')
        r.next()  # Skip first line (header).
        annotation_data = {}

        for row_number, row in enumerate(r):
            if row_number % 10000 == 0:
                logging.info("Processing row number {} of CSV file. ".format(
                    row_number))
            item_id = row[0]
            short_text = row[1]
            long_text = row[2]
            annotation_data[item_id] = (short_text, long_text)
            if len(annotation_data) > MAX_OBJECTS_PER_BATCH:
                deferred.defer(_put_annotations_batch, annotation_data)
                annotation_data = {}

        deferred.defer(_put_annotations_batch, annotation_data)
        gcs_file.close()
    except Exception as e:
        logging.error(e)
        raise deferred.PermanentTaskFailure()
Example #16
  def post(self):
    trace_uuid = str(uuid.uuid4())
    bucket_name = ('/performance-insights/' + trace_uuid)
    gcs_file = gcs.open(bucket_name,
                        'w',
                        content_type='application/octet-stream',
                        options={},
                        retry_params=default_retry_params)
    gcs_file.write(self.request.get('trace'))
    gcs_file.close()

    trace_object = trace_info.TraceInfo(id=trace_uuid)
    trace_object.prod = self.request.get('prod')
    trace_object.network_type = self.request.get('network_type')
    trace_object.remote_addr = os.environ["REMOTE_ADDR"]
    tags_string = self.request.get('tags')
    if re.match('^[a-zA-Z0-9,]+$', tags_string): # ignore non alpha-numeric tags
      trace_object.tags = tags_string.split(',')
    trace_object.user_agent = self.request.headers.get('User-Agent')
    trace_object.ver = self.request.get('product_version')
    trace_object.put()

    self.response.write(trace_uuid)
Example #17
    def post(self):
        trace_uuid = str(uuid.uuid4())
        bucket_name = ('/performance-insights/' + trace_uuid)
        gcs_file = gcs.open(bucket_name,
                            'w',
                            content_type='application/octet-stream',
                            options={},
                            retry_params=default_retry_params)
        gcs_file.write(self.request.get('trace'))
        gcs_file.close()

        trace_object = trace_info.TraceInfo(id=trace_uuid)
        trace_object.prod = self.request.get('prod')
        trace_object.network_type = self.request.get('network_type')
        trace_object.remote_addr = os.environ["REMOTE_ADDR"]
        tags_string = self.request.get('tags')
        if re.match('^[a-zA-Z0-9,]+$',
                    tags_string):  # ignore non alpha-numeric tags
            trace_object.tags = tags_string.split(',')
        trace_object.user_agent = self.request.headers.get('User-Agent')
        trace_object.ver = self.request.get('product_version')
        trace_object.put()

        self.response.write(trace_uuid)
Example #18
    def GatherPageData(self, mr):
        """Parse the attachment ID from the request and serve its content.

        Args:
          mr: commonly used info parsed from the request.

        Returns:
          Dict of values used by EZT for rendering almost the page.
        """
        with mr.profiler.Phase('get issue, comment, and attachment'):
            try:
                attachment, issue = tracker_helpers.GetAttachmentIfAllowed(
                    mr, self.services)
            except exceptions.NoSuchIssueException:
                webapp2.abort(404, 'issue not found')
            except exceptions.NoSuchAttachmentException:
                webapp2.abort(404, 'attachment not found')
            except exceptions.NoSuchCommentException:
                webapp2.abort(404, 'comment not found')

        content = []
        if attachment.gcs_object_id:
            bucket_name = app_identity.get_default_gcs_bucket_name()
            full_path = '/' + bucket_name + attachment.gcs_object_id
            logging.info("reading gcs: %s" % full_path)
            with cloudstorage.open(full_path, 'r') as f:
                content = f.read()

        filesize = len(content)

        # This servlet only displays safe textual attachments. The user should
        # not have been given a link to this servlet for any other kind.
        if not attachment_helpers.IsViewableText(attachment.mimetype,
                                                 filesize):
            self.abort(400, 'not a text file')

        u_text, is_binary, too_large = filecontent.DecodeFileContents(content)
        lines = prettify.PrepareSourceLinesForHighlighting(
            u_text.encode('utf8'))

        config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
        granted_perms = tracker_bizobj.GetGrantedPerms(issue,
                                                       mr.auth.effective_ids,
                                                       config)
        page_perms = self.MakePagePerms(mr,
                                        issue,
                                        permissions.DELETE_ISSUE,
                                        permissions.CREATE_ISSUE,
                                        granted_perms=granted_perms)

        page_data = {
            'issue_tab_mode': 'issueDetail',
            'local_id': issue.local_id,
            'filename': attachment.filename,
            'filesize': template_helpers.BytesKbOrMb(filesize),
            'file_lines': lines,
            'is_binary': ezt.boolean(is_binary),
            'too_large': ezt.boolean(too_large),
            'code_reviews': None,
            'page_perms': page_perms,
        }
        if is_binary or too_large:
            page_data['should_prettify'] = ezt.boolean(False)
        else:
            page_data.update(
                prettify.BuildPrettifyData(len(lines), attachment.filename))

        return page_data