Example #1
def upload(file: UploadedFile) -> str:
    """
    Upload an image, unless an identical one already exists.
    """
    buffer = io.BytesIO()
    for chunk in file.chunks():
        buffer.write(chunk)

    result_id = hashlib.blake2s(buffer.getbuffer()).hexdigest()
    result_path = os.path.join(MEDIA_ROOT, result_id + '.png')

    if os.path.isfile(result_path):
        return result_id

    try:
        image = Image.open(buffer)
    except OSError:
        # The upload is not a valid image; propagate the error to the caller
        raise

    with open(result_path, 'wb') as result_file:
        image.save(result_file, 'PNG')

    send_to_ml(result_path)

    return result_id
Example #2
def save_uploaded_file(file: UploadedFile) -> str:
    with tempfile.NamedTemporaryFile(mode="wb",
                                     delete=False,
                                     suffix=f"_{file.name}") as dest:
        for chunk in file.chunks():
            dest.write(chunk)
    # dest.name is already the absolute path of the temporary file
    return dest.name
Example #3
class ChunkedS3VideoUploader:
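    # Streams a Django UploadedFile to S3 as a multipart upload; the individual
    # parts are sent by the finish_file_upload Celery task rather than inline.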
    def __init__(self, djfile, key, name):
        self.s3 = boto3.client(
            "s3",
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY
        )

        self.file = UploadedFile(
            djfile,
            djfile.name,
            djfile.content_type,
            djfile.size,
            djfile.charset,
            djfile.content_type_extra
        )

        self.key = key + "/" + name
        self.video_name = djfile.video_name
        self.user_email = djfile.user_email

        self.bucket_and_key_mixin = {
            "Bucket": settings.AWS_STORAGE_BUCKET_NAME,
            "Key": self.key
        }

    def upload(self):
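        # Rewind to the start of the uploaded file before reading its chunks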
        self.file.seek(0, os.SEEK_SET)

        m_upload = self.s3.create_multipart_upload(**self.bucket_and_key_mixin)
        uid = m_upload["UploadId"]

        # Hex-encode the chunks so they can be serialised and handed to the task queue
        chunks_list = [chunk.hex() for chunk in self.file.chunks(settings.CHUNK_SIZE)]

        try:
            finish_file_upload.delay(
                bucket_and_key=self.bucket_and_key_mixin,
                upload_id=uid,
                file_chunks=chunks_list,
                video_name=self.video_name,
                user_email=self.user_email
            )
        except Exception as exc:
            print(exc)

            self.s3.abort_multipart_upload(
                Bucket=settings.AWS_STORAGE_BUCKET_NAME,
                Key=m_upload["Key"],
                UploadId=uid
            )

        return self.s3.generate_presigned_url(
            "get_object",
            Params=self.bucket_and_key_mixin,
            HttpMethod="GET"
        )
Example #4
    def create_file(cls, path: str, file: UploadedFile) -> bool:
        try:
            with open(os.path.join(path, file.name), 'wb') as f:
                for chunk in file.chunks():
                    f.write(chunk)
            return True
        except OSError:
            return False
Example #5
def handle_uploaded_file(f: UploadedFile, user_id):
    # FileSystemStorage instance rooted at the user's file store
    fs = FileSystemStorage('fileStore/' + user_id + "/files")

    # Get an available (non-clashing) file name
    name = fs.get_available_name(f.name)

    # Write the file chunk by chunk
    with fs.open(name, 'wb') as destination:
        for chunk in f.chunks():
            destination.write(chunk)
    return name
Example #6
    def save_uploaded_file(self, uploaded_file: UploadedFile):
        save_path = pathlib.Path(settings.MEDIA_ROOT, 'sources',
                                 str(self.event_id), uploaded_file.name)
        if save_path.exists():
            # Insert a timestamp before the suffix (stem, not name, to avoid doubling the suffix)
            save_path = save_path.with_name("{}_{}{}".format(
                save_path.stem, time.time(), save_path.suffix))

        save_path.parent.mkdir(parents=True, exist_ok=True, mode=0o775)
        with open(str(save_path), mode='wb') as save_file:
            for chunk in uploaded_file.chunks():
                save_file.write(chunk)

        self.file = str(save_path.relative_to(settings.MEDIA_ROOT))
        return
Example #7
def write_file_to_disk(workspace: str, local_file: UploadedFile) -> None:
    """
    Write file uploaded to server to unique workspace.

    Parameters
    ----------
    workspace: str
        Unique workspace for current task.
    local_file: UploadedFile
        Uploaded file to local server.
    """
    filepath = os.path.join(workspace, local_file.name)
    with open(filepath, "wb+") as destination:
        for chunk in local_file.chunks():
            destination.write(chunk)
Example #8
def get_hashsums(uploaded_file: UploadedFile):
    hash_sums = {
        'md5sum': hashlib.md5(),
        'sha1sum': hashlib.sha1(),
        'sha224sum': hashlib.sha224(),
        'sha256sum': hashlib.sha256(),
        'sha384sum': hashlib.sha384(),
        'sha512sum': hashlib.sha512(),
    }

    # Feed every chunk to all hash objects in a single pass over the file
    for data_chunk in uploaded_file.chunks(chunk_size=4096):
        for hashsum in hash_sums.values():
            hashsum.update(data_chunk)

    return {key: value.hexdigest() for key, value in hash_sums.items()}
Example #9
def show(request):
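    # Saves the uploaded CSV (via FileSystemStorage and the Upload model), then
    # replays its rows into a file_content record one second apart, honouring the
    # module-level is_paused / stopped flags; note that a single file_content
    # instance is reused, so each save() updates the same database row.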
    content = {}
    if request.method == 'POST':
        uploaded_file = request.FILES['file']
        # chunks() is an instance method; note that this generator is never consumed
        uploaded_file_chunks = uploaded_file.chunks(chunk_size=100)
        print(uploaded_file.name)
        print(uploaded_file.size)
        fs = FileSystemStorage()
        fs.save(uploaded_file.name, uploaded_file)
        global upload
        upload = Upload()
        upload.title = uploaded_file.name
        upload.file = uploaded_file
        upload.save()
        global filecontent
        filecontent = file_content()
        path = os.path.join(BASE_DIR, 'media/') + uploaded_file.name
        with open(path) as csv_file:
            csv_reader = csv.reader(csv_file, delimiter=',')
            next(csv_reader)
            for row in csv_reader:
                if not is_paused:
                    filecontent.seq = row[0]
                    filecontent.First_name = row[1]
                    filecontent.Last_name = row[2]
                    filecontent.age = row[3]
                    filecontent.street = row[4]
                    filecontent.city = row[5]
                    filecontent.state = row[6]
                    filecontent.zipcode = row[7]
                    filecontent.dollar = row[8]
                    filecontent.color = row[9]
                    filecontent.date = row[10]
                    filecontent.CSV_ID = upload.pk
                    filecontent.save()
                    time.sleep(1)
                    if stopped:
                        break
                while is_paused:
                    time.sleep(1)
                    print("paused")
    return render(request, 'upload/upload.html', content)
Example #10
def save_file_to_drive(file: UploadedFile, raw_dir: Path) -> Optional[str]:
    """
    :param file: A Django UploadedFile file
    :param raw_dir: A path to user uploaded unprocessed corpora files.
    """
    filename = Path(file.name)
    stem, suffix = slugify(filename.stem), filename.suffix
    filename = f'{stem}{suffix}'
    # logger.debug(f'Before: {file.name}, after: {filename}, type: {type(filename)}')

    # if raw_dir.joinpath(filename).exists():
    #     return None

    logging.info(f'Received: {filename}, size: {file.size}')
    path = raw_dir / filename
    with open(path, 'wb+') as fp:
        for chunk in file.chunks():
            fp.write(chunk)
    logging.info(f'Writing to disk complete.')

    return filename
Example #11
def set_config_file(key: str, file: UploadedFile):
    from palanaeum.models import ConfigEntry

    try:
        entry = ConfigEntry.objects.get(key=key)
        if entry.value:
            os.unlink(os.path.join(settings.MEDIA_ROOT, entry.value))
    except ConfigEntry.DoesNotExist:
        entry = ConfigEntry(key=key)
    except FileNotFoundError:
        pass

    file_path = os.path.join(settings.CONFIG_UPLOADS, file.name)
    os.makedirs(settings.CONFIG_UPLOADS, exist_ok=True)
    entry.value = os.path.relpath(file_path, settings.MEDIA_ROOT)

    with open(file_path, mode='wb') as write_file:
        for chunk in file.chunks():
            write_file.write(chunk)

    entry.save()
    cache.set(key, entry.value)
    return
Example #12
def accept_uploaded_photo(request, album_id):
    """
    Main Multiuploader module.
    Parses data from jQuery plugin and makes database changes.
    """
    if request.method == 'POST':
        logid = random.randint(0,1000)
        log.info('[%s] received POST to main multiuploader view' % logid)
        if not request.FILES:
            return HttpResponseBadRequest('Must have files attached!')

        # getting file data for further manipulations
        file = request.FILES[u'files[]']
        wrapped_file = UploadedFile(file)
        filename = wrapped_file.name
        file_size = wrapped_file.file.size
        log.info ('[%s] Got file: "%s"' % (logid, str(filename)))

        # Write out file to disk as a temp file
        randnumber = logid # use the random number here too
        temp_filename = '%stmp%s_%s' % (settings.TEMP_DIRECTORY,randnumber, filename)
        log.info('[%s] Writing out to: %s' % (logid, temp_filename))
        destination = open(temp_filename, 'wb+')
        if wrapped_file.multiple_chunks():
            for chunk in wrapped_file.chunks():
                destination.write(chunk)
        else:
            destination.write(wrapped_file.read())
        destination.close()

        # Dump out EXIF Tags
#        im = Image.open(temp_filename)
#        if hasattr( im, '_getexif' ):
#            exifinfo = im._getexif()
#            if exifinfo:
#                for tag, value in exifinfo.items():
#                    decoded = ExifTags.TAGS.get(tag, tag)
#                    log.info('Found tag: %s, value: %s' % (decoded,value))

        orientation = None
        date_taken = None
        # Make full size and thumbsize
        try:
            im = Image.open(temp_filename)
        except IOError as e:
            log.info('[%s] Error opening file %s: %s %s' % (logid, temp_filename, e.errno, e))
            return HttpResponseBadRequest('Could not read file')

        if hasattr( im, '_getexif' ):
            exifinfo = im._getexif()
            if exifinfo:
                for tag, value in exifinfo.items():
                    decoded = ExifTags.TAGS.get(tag, tag)
#                    if decoded != 'MakerNote':
#                        if decoded != 'UserComment':
#                            log.info('Found tag: %s, value: %s' % (decoded,value))
                    if decoded == 'Orientation':
                        orientation = value
                        log.info('[%s] Found tag: %s, value: %s' % (logid,decoded,value))
                    elif decoded == 'DateTime':
                        date_taken =  datetime.strptime(value, "%Y:%m:%d %H:%M:%S")
                        log.info('[%s] Found tag: %s, value: %s, date_taken=%s' % (logid,decoded,value,date_taken))

        # We rotate regarding to the EXIF orientation information
        if orientation:
            if orientation == 1:
                # Nothing
                log.info('[%s] Orientation: No rotation necessary' % logid)
                pass
            elif orientation == 2:
                # Vertical Mirror
                log.info('[%s] Orientation: Vertical flip' % logid)
                im = im.transpose(Image.FLIP_LEFT_RIGHT)
            elif orientation == 3:
                # Rotation 180
                log.info('[%s] Orientation: Rotation 180' % logid)
                im = im.transpose(Image.ROTATE_180)
            elif orientation == 4:
                # Horizontal Mirror
                log.info('[%s] Orientation: Horizontal Mirror' % logid)
                im = im.transpose(Image.FLIP_TOP_BOTTOM)
            elif orientation == 5:
                # Horizontal Mirror + Rotation 270
                log.info('[%s] Orientation: Flip top bottom, rot 270' % logid)
                im = im.transpose(Image.FLIP_TOP_BOTTOM).transpose(Image.ROTATE_270)
            elif orientation == 6:
                # Rotation 270
                log.info('[%s] Orientation: Rotate 270' % logid)
                im = im.transpose(Image.ROTATE_270)
            elif orientation == 7:
                # Vertical Mirror + Rotation 270
                log.info('[%s] Orientation: Flip left right, rotate 270' % logid)
                im = im.transpose(Image.FLIP_LEFT_RIGHT).transpose(Image.ROTATE_270)
            elif orientation == 8:
                # Rotation 90
                log.info('[%s] Orientation: Rotate 90' % logid)
                im = im.transpose(Image.ROTATE_90)

        #------------------
        # Save the transposed image to disk
        orig_path = '%stmp%s_mod%s' % (settings.TEMP_DIRECTORY,randnumber, filename)
        # keep 100% fidelity on the image
        try:
            log.info('[%s] Writing corrected photo to path %s' % (logid,orig_path))
            im.save(orig_path, "JPEG", quality=100)
        except IOError as e:
            log.info('[%s] Error saving file %s: %s %s' % (logid, orig_path, e.errno, e))
            return HttpResponseBadRequest('Could not save file')

        #------------------
        # Save the photo object into the database
        album = Album.objects.get(id=album_id)
        photo = Photo()
        photo.album = album

        log.info('[%s] Determining photo order' % logid)
        #------------------
        # Determine where in the photo order this picture needs to be
        photo.order = 0
        if date_taken:
            photo.photodate = date_taken
            log.info('[%s] Date Taken is %s' % (logid,date_taken))
            # Now try to insert the photo by date taken in the order list
            prev_photo = photo.prev_photo_by_photodate()
            if prev_photo:
                log.info('got prev photo.  id=%s, photodate=%s, order=%s' % (prev_photo.id,prev_photo.photodate,prev_photo.order))
                photo.order = prev_photo.order
            else:
                # First in album
                photo.order = 0
        else:
            # Last in album
            photo.order = album.photo_set.count() + 1

        log.info('[%s] Writing photo entry to database' % logid)
        #------------------
        # Now finally write the entry to the db
        photo.save()
        log.info('[%s] Photo object saved.  id = %s, order = %s' % (logid, photo.id,photo.order))
        #album.reorder_photos()

        log.info('[%s] Attempting to save file %s to django model id %s' % (logid, orig_path, photo.id))
        with open(orig_path, 'rb') as f:
            photo.filename.save('%s.jpg' % photo.id, File(f))

        log.info('[%s] Cleaning up files' % logid)
        #clean up temp file
        unlink(temp_filename)
        unlink(orig_path)

        #settings imports
        file_delete_url = 'multi_delete/'

        thumbnail_options = dict(size=(200, 200), crop=True)
        thumb_url = get_thumbnailer(photo.filename).get_thumbnail(thumbnail_options).url

        #generating json response array
        result = []
        result.append({"name":filename,
                       "size":file_size,
                       "url": thumb_url,
                       "thumbnail_url":thumb_url,
                       "delete_url":'/',
                       "delete_type":"POST",})
        response_data = simplejson.dumps(result)

        # checking for json data type in the Accept header
        # big thanks to Guy Shapiro
        if "application/json" in request.META.get('HTTP_ACCEPT', ''):
            mimetype = 'application/json'
        else:
            mimetype = 'text/plain'
        return HttpResponse(response_data, content_type=mimetype)
    else: #GET
        return HttpResponse('Only POST accepted')
Example #13
def calculate_checksum(uploaded_file: UploadedFile) -> str:
    md5 = hashlib.md5()
    for chunk in uploaded_file.chunks():
        md5.update(chunk)
    return md5.hexdigest()
Example #14
def detect_mime_type(uploaded_file: UploadedFile) -> str:
    """Detect mime type of an uploaded file"""
    magic = Magic(mime=True)
    chunk = next(uploaded_file.chunks(chunk_size=2048))
    return magic.from_buffer(chunk)
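As a usage sketch, the detect_mime_type helper above could back an upload validation step in a Django form. The DocumentForm class, the ALLOWED_MIME_TYPES whitelist, and the import path for the helper are hypothetical names used only for illustration, assuming the application accepts image uploads.

from django import forms

from .utils import detect_mime_type  # assumption: the Example #14 helper lives in a local utils module

# Hypothetical whitelist; adjust to whatever the application actually accepts
ALLOWED_MIME_TYPES = {"image/png", "image/jpeg"}


class DocumentForm(forms.Form):
    file = forms.FileField()

    def clean_file(self):
        uploaded = self.cleaned_data["file"]
        mime_type = detect_mime_type(uploaded)
        if mime_type not in ALLOWED_MIME_TYPES:
            raise forms.ValidationError(f"Unsupported file type: {mime_type}")
        # chunks() advanced the file position; rewind before the file is read again
        uploaded.seek(0)
        return uploaded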