Example #1
import logging

import boto.exception
import boto.s3
import boto.s3.key


def upload_to_s3(
        directory,
        settings,
        modified_files,
        region='us-east-1',
        log=logging.getLogger(__name__),
):
    """Uploads the local directory to the S3 Package Index"""
    s3_conn = None
    try:
        s3_conn = boto.s3.connect_to_region(region)
    except boto.exception.NoAuthHandlerFound as error:
        log.critical(error)
    else:
        s3_prefix = ensure_ends_with_slash(settings.get('s3.prefix'))

        s3_bucket = s3_conn.get_bucket(settings.get('s3.bucket'))

        for modified_file in modified_files:
            key = boto.s3.key.Key(
                bucket=s3_bucket,
                name=(s3_prefix + modified_file[len(directory)+1:]),
            )
            log.info(
                'Uploading "%s" to "%s" in "%s"',
                modified_file,
                key.name,
                key.bucket.name,
            )
            key.set_contents_from_filename(
                modified_file,
            )
            key.set_acl('public-read')
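
The example relies on a helper, ensure_ends_with_slash, that is not shown. A minimal sketch, assuming the prefix setting is a plain string:

def ensure_ends_with_slash(path):
    """Return path with a single trailing slash appended if missing."""
    if path and not path.endswith('/'):
        return path + '/'
    return path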
Example #2
import boto.s3.key


def add_thumb(config, local_filename, remote_filename, extension):
    bucket = _get_s3_bucket(config)

    key = boto.s3.key.Key(bucket)
    key.key = remote_filename
    # Set metadata before uploading: boto only sends it with the
    # upload itself, so setting it afterwards has no effect on S3.
    key.set_metadata('Content-Type', "image/" + extension)
    key.set_contents_from_filename(local_filename)
    key.set_acl('public-read')
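
This example and the next assume a _get_s3_bucket helper. A minimal sketch, with hypothetical config keys:

import boto


def _get_s3_bucket(config):
    # The config key names here are illustrative; the original
    # project's configuration layout is not shown.
    conn = boto.connect_s3(
        aws_access_key_id=config['AWS_ACCESS_KEY_ID'],
        aws_secret_access_key=config['AWS_SECRET_ACCESS_KEY'],
    )
    return conn.get_bucket(config['BUCKET_NAME'])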
Example #3
import datetime

import boto.s3.key


def add_cv(config, person_id, contents, filename):
    # Validate before converting to str: a string id would never
    # compare equal to the integer 0, so the check could never fire.
    person_id = int(person_id)
    assert person_id != 0

    bucket = _get_s3_bucket(config)

    when = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S-")

    key = boto.s3.key.Key(bucket)
    key.key = "cvs/" + str(person_id) + "/" + when + filename
    key.set_contents_from_string(contents)
    key.set_acl('public-read')
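
A hypothetical call, storing a PDF for person 42 under a timestamped key such as "cvs/42/2024-05-01T12:00:00-cv.pdf":

with open('cv.pdf', 'rb') as f:
    add_cv(config, 42, f.read(), 'cv.pdf')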
Example #4
def uploadBundleBackend(cls, bundleItemGen, fileCount, totalSize,
        bucket, permittedUsers=None, callback=None, policy="private"):
    current = 0
    for i, (biName, biSize, biFileObj) in enumerate(bundleItemGen):
        keyName = os.path.basename(biName)
        key = bucket.new_key(keyName)
        if callback:
            cb = cls.UploadCallback(callback, biName,
                i + 1, fileCount, current, totalSize).callback
        else:
            cb = None
        current += biSize
        key.set_contents_from_file(biFileObj, cb=cb, policy=policy)
        if permittedUsers:
            # Grant additional permissions
            key = bucket.get_key(keyName)
            acl = key.get_acl()
            for user in permittedUsers:
                acl.acl.add_user_grant('READ', user)
            key.set_acl(acl)
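
The method consumes a generator of (name, size, fileobj) tuples. A minimal sketch of such a generator, assuming local file paths:

import os


def bundle_items(paths):
    for path in paths:
        yield path, os.path.getsize(path), open(path, 'rb')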
Example #5
import os

import boto
import boto.s3.key


def _upload_file_to_bucket(server, filename):
    '''
    Upload the file to the bucket and return the URL to serve that file.
    Using the server, upload filename to a bucket. The bucket
    is in server.path. After that, use server.url to generate
    the URL that will be used to serve the image from now on
    and return that URL.
    '''
    _, filename_part = os.path.split(filename)
    pathname = _generate_path_name_for(filename_part)
    bucket_name = server.path
    connection = boto.connect_s3(
        aws_access_key_id=_get_aws_access_key_id(),
        aws_secret_access_key=_get_aws_secret_access_key())
    bucket = connection.get_bucket(bucket_name)
    key = boto.s3.key.Key(bucket)
    key.key = pathname
    key.set_contents_from_filename(filename)
    key.set_acl('public-read')
    connection.close()
    return '{}/{}'.format(server.url, pathname)
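
_generate_path_name_for is not shown; a plausible sketch that avoids key collisions by prefixing a random token (an assumption, not the original logic):

import uuid


def _generate_path_name_for(filename_part):
    # Hypothetical helper: make the stored key unique per upload.
    return '{}-{}'.format(uuid.uuid4().hex, filename_part)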
Example #6
import tempfile

import boto.s3.connection
import boto.s3.key


def new_data_to_publish_to_s3(config, section, new):
    # Get the metadata for our old chunk

    # If necessary fetch the existing data from S3, otherwise open a local file
    if ((config.has_option('main', 's3_upload')
         and config.getboolean('main', 's3_upload'))
            or (config.has_option(section, 's3_upload')
                and config.getboolean(section, 's3_upload'))):
        conn = boto.s3.connection.S3Connection()
        bucket = conn.get_bucket(config.get('main', 's3_bucket'))
        s3key = config.get(section, 's3_key') or config.get(section, 'output')
        key = bucket.get_key(s3key)
        if key is None:
            # most likely a new list
            print('{0} looks like it hasn\'t been uploaded to '
                  's3://{1}/{2}'.format(section, bucket.name, s3key))
            key = boto.s3.key.Key(bucket)
            key.key = s3key
            key.set_contents_from_string('a:1:32:32\n' + 32 * '1')
        current = tempfile.TemporaryFile()
        key.get_contents_to_file(current)
        key.set_acl('bucket-owner-full-control')
        if CLOUDFRONT_USER_ID is not None:
            key.add_user_grant('READ', CLOUDFRONT_USER_ID)
        current.seek(0)
    else:
        current = open(config.get(section, 'output'), 'rb')

    old = chunk_metadata(current)
    current.close()

    # An upload is needed only when the checksum changed.
    return old['checksum'] != new['checksum']
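
chunk_metadata is assumed above. A hedged sketch, assuming the file holds a shavar-style "a:num:hashlen:bytelen" header followed by the payload (the real helper may compute its checksum differently):

import hashlib


def chunk_metadata(fp):
    # Parse an "a:1:32:32\n"-style header, as seeded above.
    chunktype, num, hash_size, length = fp.readline().rstrip(b'\n').split(b':')
    payload = fp.read(int(length))
    return {
        'type': chunktype.decode(),
        'num': int(num),
        'hash_size': int(hash_size),
        'checksum': hashlib.sha256(payload).hexdigest(),
    }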
Example #7
  def upload(self, mpi, source, target, pos = 0, chunk = 0, part = 0):
    '''Thread worker for upload operation.'''
    s3url = S3URL(target)
    bucket = self.s3.lookup(s3url.bucket, validate=self.opt.validate)

    # Initialization: Set up multithreaded uploads.
    if not mpi:
      fsize = os.path.getsize(source)
      key = bucket.get_key(s3url.path)

      # optional checks
      if self.opt.dry_run:
        message('%s => %s', source, target)
        return
      elif self.opt.sync_check and self.sync_check(source, key):
        message('%s => %s (synced)', source, target)
        return
      elif not self.opt.force and key:
        raise Failure('File already exists: %s' % target)

      # extra headers; use distinct names so the S3 key object
      # fetched above is not shadowed
      extra_headers = {}
      if self.opt.add_header:
        for hdr in self.opt.add_header:
          try:
            hname, hval = hdr.split(":", 1)
          except ValueError:
            raise Failure("Invalid header format: %s" % hdr)
          hname_inval = re.sub("[a-zA-Z0-9-.]", "", hname)
          if hname_inval:
            hname_inval = hname_inval.replace(" ", "<space>")
            hname_inval = hname_inval.replace("\t", "<tab>")
            raise ParameterError("Invalid character(s) in header name '%s': \"%s\"" % (hname, hname_inval))
          extra_headers[hname.strip().lower()] = hval.strip()

      # Small file optimization.
      if fsize < self.opt.max_singlepart_upload_size:
        key = boto.s3.key.Key(bucket)
        key.key = s3url.path
        key.set_metadata('privilege',  self.get_file_privilege(source))
        key.set_contents_from_filename(source, reduced_redundancy=self.opt.reduced_redundancy, headers=extra_headers)
        if self.opt.acl_public:
          key.set_acl('public-read')
        message('%s => %s', source, target)
        return

      # Store our own md5 in the metadata: the ETag of a multipart
      # upload is not the md5 of the complete file.
      mpu = bucket.initiate_multipart_upload(s3url.path, metadata = {'md5': self.file_hash(source), 'privilege': self.get_file_privilege(source)})

      for args in self.get_file_splits(mpu.id, source, target, fsize, self.opt.multipart_split_size):
        self.pool.upload(*args)
      return

    # Handle each part in parallel, post initialization.
    mpu = None
    for mp in bucket.list_multipart_uploads():
      if mp.id == mpi.id:
        mpu = mp
        break
    if mpu is None:
      raise Failure('Could not find MultiPartUpload %s' % mpi.id)

    with open(source, 'rb') as f:
      f.seek(pos)
      data = f.read(chunk)
    if not data:
      raise Failure('Unable to read data from source: %s' % source)

    mpu.upload_part_from_file(StringIO(data), part)

    # Finalize
    if mpi.complete():
      try:
        mpu.complete_upload()
        message('%s => %s', source, target)
      except Exception:
        mpu.cancel_upload()
        raise RetryFailure('Upload failed: Unable to complete upload %s.' % source)
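
The worker relies on an S3URL helper; a minimal sketch that splits "s3://bucket/path" into its two parts:

class S3URL(object):
    def __init__(self, url):
        assert url.startswith('s3://'), 'not an S3 URL: %s' % url
        self.bucket, _, self.path = url[len('s3://'):].partition('/')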
Example #8
import boto.s3.connection
import boto.s3.key
from django.http import HttpResponse, HttpResponseBadRequest
from django.shortcuts import render

import bitvid.dbinfo
# Hypothetical import path; the original module layout is not shown.
from .models import Channel, Video, VideoFile


def upload(request):
    if request.method == "GET":
        return render(request, "upload.html")
    else:
        user = request.user
        uploaded_file = request.FILES.get("file", None)

        if uploaded_file is None:
            return HttpResponseBadRequest("You did not pass a file.")

        title = request.POST.get("title", "")
        desc = request.POST.get("desc", "")
        channel_id_str = request.POST.get("channel", "1")

        try:
            channel_id = int(channel_id_str)
        except ValueError:
            return HttpResponseBadRequest("Channel ID must be an integer")

        if title == "":
            return HttpResponseBadRequest("Title must not be empty.")

        if len(desc) > 2000:
            return HttpResponseBadRequest("Description must not be longer than 2000 characters.")

        if not Channel.objects.filter(pk=channel_id).exists():
            return HttpResponseBadRequest("Channel ID must be a valid channel.")

        channel = Channel.objects.get(pk=channel_id)

        if not channel.members.filter(id=user.id).exists():
            return HttpResponseBadRequest("You do not own that channel.")


        video = Video.objects.create(uploader=user)
        video.title = title
        video.description = desc
        video.channel = channel
        video.save()

        video_file = VideoFile.objects.create()
        video_file.format = uploaded_file.content_type
        video_file.save()

        video.video_files.add(video_file)

        conn = boto.s3.connection.S3Connection(bitvid.dbinfo.AWS_ACCESS, bitvid.dbinfo.AWS_SECRET)

        bucket = conn.get_bucket(VIDEO_BUCKET_NAME)
        bucket.set_acl("public-read")
        key = boto.s3.key.Key(bucket)

        video_path = str(video.id) + "/original.mp4"

        video_file.url = ("http://" + VIDEO_BUCKET_NAME +
                          ".s3.amazonaws.com/" + video_path)

        key.key = video_path
        # temporary_file_path() requires a disk-backed upload handler.
        key.set_contents_from_filename(uploaded_file.temporary_file_path())

        key.set_acl('public-read')

        conn.close()

        video_file.save()
        return HttpResponse(str(video.id))
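
The view assumes module-level configuration; a hypothetical sketch of those names (placeholder values, not the project's real settings):

VIDEO_BUCKET_NAME = "bitvid-videos"  # hypothetical bucket name

# bitvid/dbinfo.py (hypothetical contents)
AWS_ACCESS = "your-access-key-id"
AWS_SECRET = "your-secret-access-key"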