Beispiel #1
0
    def post(self):
        """Handle file uploads: store every uploaded file into a MongoDB
        GridFS bucket and report success or the number of failures.

        Request arguments:
            filedb -- optional GridFS bucket name (defaults to "upload").

        Writes a WebApiResultJson payload: code 0 with {"id", "filename"}
        of the last stored file on success, code 1 with an error message
        otherwise.  Runs as a Tornado generator coroutine (uses ``yield``).
        """
        # Keep only the last value supplied for each request argument.
        inputdict = {k: v[-1] for k, v in self.request.arguments.items()}

        # Target GridFS bucket (collection prefix) for the uploaded files.
        filedb = "upload"
        if "filedb" in inputdict:
            filedb = inputdict["filedb"].decode("utf-8")

        client = MotorClient(config.ServerParameters.mongodbpath)
        db = client.jt808
        filename = None
        fileid = None

        if len(self.request.files) <= 0:
            self.write(tigerfunctools.WebApiResultJson(1, "没有上传文件", None))
            return

        uploaderror = 0

        for fk in self.request.files:
            for meta in self.request.files[fk]:
                filename = meta['filename']
                # chunk_size_bytes deliberately left at the driver default
                bucket = MotorGridFSBucket(db, filedb)

                up = bucket.open_upload_stream(filename)
                if up is None:
                    self.write(
                        tigerfunctools.WebApiResultJson(1, "写入数据库错误", None))
                    return

                fileid = up._id
                yield up.write(meta["body"])
                yield up.close()
                if fileid is None:
                    uploaderror += 1

        if uploaderror == 0:
            rrr = {"id": str(fileid), "filename": filename}
            self.write(tigerfunctools.WebApiResultJson(0, "上传成功", rrr))
        else:
            # BUG FIX: was `null` (undefined name) -- raised NameError
            # instead of reporting the upload failures.
            self.write(
                tigerfunctools.WebApiResultJson(
                    1, "有" + str(uploaderror) + "个文件上传出错", None))
Beispiel #2
0
 async def test_stream_to_handler(self):
     """Round-trip a GridFS file through stream_to_handler and check the
     mock handler received exactly the number of bytes that were stored."""
     bucket = MotorGridFSBucket(self.db)
     n_bytes = 1000
     payload = b'a' * n_bytes
     await bucket.delete(1)
     await bucket.upload_from_stream_with_id(1, 'filename', source=payload)
     stream = await bucket.open_download_stream(1)
     handler = test.MockRequestHandler()
     await stream.stream_to_handler(handler)
     self.assertEqual(n_bytes, handler.n_written)
     await bucket.delete(1)
Beispiel #3
0
    async def test_iter_gridfs(self):
        """Exercise async iteration over GridFS: an empty cursor, multi-batch
        find() results, and chunk-by-chunk iteration of a grid-out file."""
        bucket = MotorGridFSBucket(self.db)

        async def purge():
            # Drop both GridFS collections so each round starts clean.
            await self.db.fs.files.delete_many({})
            await self.db.fs.chunks.delete_many({})

        await purge()

        # An empty cursor must yield nothing at all.
        async for _ in bucket.find({'_id': 1}):
            self.fail()

        payload = b'data'

        for expected in (1, 2, 10):
            for _ in range(expected):
                async with bucket.open_upload_stream(filename='filename') as stream:
                    await stream.write(payload)

            # A small batch size forces several getMore round trips.
            seen = 0
            async for _ in bucket.find({'filename': 'filename'}).batch_size(3):
                seen += 1

            self.assertEqual(seen, expected)
            await purge()

        # One-byte chunks: iterating the grid-out yields one chunk per byte.
        await bucket.upload_from_stream_with_id(
            1, 'filename', source=payload, chunk_size_bytes=1)
        cursor = bucket.find({'_id': 1})
        await cursor.fetch_next
        grid_out = cursor.next_object()
        pieces = []
        async for piece in grid_out:
            pieces.append(piece)

        self.assertEqual(len(pieces), len(payload))
        self.assertEqual(b''.join(pieces), payload)
Beispiel #4
0
    async def test_iter_gridfs(self):
        """Verify GridFS async iteration behavior: empty result sets yield
        nothing, batched cursors visit every document, and a grid-out file
        iterates one chunk at a time."""
        gfs = MotorGridFSBucket(self.db)

        async def wipe_collections():
            # Remove all file metadata and chunk documents between rounds.
            await self.db.fs.files.delete_many({})
            await self.db.fs.chunks.delete_many({})

        await wipe_collections()

        # Iterating a query with no matches must not enter the loop body.
        async for _ in gfs.find({'_id': 1}):
            self.fail()

        data = b'data'

        for n_files in (1, 2, 10):
            for _ in range(n_files):
                async with gfs.open_upload_stream(filename='filename') as out:
                    await out.write(data)

            # batch_size(3) forces extra server round trips during iteration.
            count = sum([1 async for _ in
                         gfs.find({'filename': 'filename'}).batch_size(3)])
            self.assertEqual(count, n_files)
            await wipe_collections()

        # With 1-byte chunks, iteration yields len(data) separate chunks.
        await gfs.upload_from_stream_with_id(1,
                                             'filename',
                                             source=data,
                                             chunk_size_bytes=1)
        cursor = gfs.find({'_id': 1})
        await cursor.fetch_next
        gout = cursor.next_object()
        chunks = [chunk async for chunk in gout]

        self.assertEqual(len(chunks), len(data))
        self.assertEqual(b''.join(chunks), data)
Beispiel #5
0
    def get(self):
        """Serve a file from a MongoDB GridFS bucket, optionally resized
        (for images) and with basic Range / If-Modified-Since support.

        Request arguments:
            id        -- GridFS ObjectId of the file (takes precedence).
            filename  -- file name to look up when no id is given
                         (defaults to "goldhonor.jpg").
            filedb    -- bucket name (defaults to "upload").
            maxwidth / maxheight -- non-zero values trigger server-side
                         image resizing via AsycnGetCustomSmallPic.

        Runs as a Tornado generator coroutine (uses ``yield``).
        """
        inputdict = {k: v[-1] for k, v in self.request.arguments.items()}

        filename = None
        filedb = "upload"
        smaxwidth = None
        smaxheight = None
        file_id = None  # was `id`: avoid shadowing the builtin

        if "id" in inputdict:
            file_id = inputdict["id"].decode("utf-8")
        if "filename" in inputdict:
            filename = inputdict["filename"].decode("utf-8")
        if "filedb" in inputdict:
            filedb = inputdict["filedb"].decode("utf-8")
        if "maxwidth" in inputdict:
            smaxwidth = inputdict["maxwidth"].decode("utf-8")
        if "maxheight" in inputdict:
            smaxheight = inputdict["maxheight"].decode("utf-8")

        maxwidth = int(smaxwidth) if smaxwidth is not None else 0
        maxheight = int(smaxheight) if smaxheight is not None else 0

        # Size prefix used in the download name of a resized image.
        maxstr = ""
        if maxwidth != 0 or maxheight != 0:
            maxstr = str(maxwidth) + "_" + str(maxheight)

        if filename is None:
            filename = "goldhonor.jpg"

        client = MotorClient(config.ServerParameters.mongodbpath)
        db = client.jt808

        bucket = MotorGridFSBucket(db, filedb)
        if file_id is not None:
            fs = bucket.find({"_id": ObjectId(file_id)})
        else:
            fs = bucket.find({"filename": filename})

        if fs is None:
            return

        yield fs.fetch_next
        fi = fs.next_object()
        if fi is None:
            return

        filename = fi.filename
        m_time = fi.upload_date

        # Answer 304 when the client's cached copy matches the upload date.
        r_time = utf8(self.request.headers.get("If-Modified-Since", ""))
        if len(r_time) > 8:
            r_time = parser.parse(r_time)
            if r_time == m_time:
                self.set_status(304)
                return

        ds = yield bucket.open_download_stream(fi._id)
        if ds is None:
            return

        isimage = False
        filelength = fi.length
        isresize = False
        newds = None
        if (filename.find(".jpg") >= 0 or filename.find(".bmp") >= 0
                or filename.find(".gif") >= 0 or filename.find(".png") >= 0):
            isimage = True
            if maxwidth != 0 or maxheight != 0:
                newds = yield AsycnGetCustomSmallPic(ds, maxwidth, maxheight)
                if newds is not None:
                    # Measure the resized stream, then rewind for serving.
                    newds.seek(0, 2)
                    filelength = newds.tell()
                    isresize = True
                    newds.seek(0, 0)

        p = 0
        # was `range`: avoid shadowing the builtin
        range_header = self.request.headers.get("Range", "")
        if len(range_header) > 0:
            # BUG FIX: was 205 (Reset Content); Partial Content is 206.
            self.set_status(206)  # 断点续传 (resumable download)
            # BUG FIX: take only the start offset -- the old
            # replace("-", "") concatenated start and end digits
            # for "bytes=N-M" ranges.
            p = int(range_header.replace("bytes=", "").split("-")[0])
            if p >= filelength:
                return
        self.set_header(
            "Content-Range", "bytes " + str(p) + "-" + str(filelength - 1) +
            "/" + str(filelength))
        self.set_header("Content-Length", str(filelength - p))
        if isimage:
            self.set_header("Content-Type", "image/" + "jpg")
        else:
            self.set_header("Content-Type", "application/octet-stream")
        if isresize:
            self.set_header(
                "Content-Disposition", "attachment;" +
                urlparse.urlencode({"filename": maxstr + filename}))
        else:
            self.set_header(
                "Content-Disposition",
                "attachment;" + urlparse.urlencode({"filename": filename}))
        self.add_header("Date", datetime.datetime.now())
        self.add_header("Last-Modified", m_time)
        self.add_header("Expires",
                        datetime.datetime.now() + datetime.timedelta(days=365))
        # BUG FIX: header name and value were fused into the name argument
        # (set_header("Cache-Control: public", True)).
        self.set_header("Cache-Control", "public")

        if isresize:
            # BUG FIX: the old code seeked to p and then sliced [p:] again,
            # skipping the first p bytes twice.
            newds.seek(p, 0)
            self.write(newds.read())
        else:
            yield ds.seek(p)
            bbb = yield ds.read()
            self.write(bbb)

        if newds is not None:
            newds.close()
        if ds is not None:
            yield ds.close()
        return