# Code example #1
def base_start(request):
    """Handle the 'start' step of a full sync: begin the sync on the
    collection and stash the negotiated USN bounds in the session."""
    session = safe_get_session(request)
    payload = get_data(request)

    dump_io_to_file(session, "start", request)

    with get_collection(session) as col:
        handler = SyncCollectionHandler(col)

        # FIXME: for the moment we are just ignoring the 'offset' parameter,
        # hoping it will just work if we only use the V1 scheduler.
        result = handler.start(
            min_usn=payload.get("minUsn"),
            lnewer=payload.get("lnewer"),
            offset=payload.get("offset"),
        )

        # Persist values that later calls in this sync flow depend on.
        # NOTE(review): obtaining these as a side effect of start() is fragile
        # and needs rethinking. 'start' is not called when no main-db sync is
        # needed, so the media path must also seed dump_base in meta.
        # FIXME: needs serious testing for at least:
        # - failure and then starting again with a new sync
        session["min_usn"] = handler.min_usn
        session["max_usn"] = handler.max_usn
        session["lnewer"] = handler.lnewer
        session.save()

        resp = JsonResponse(result)

    dump_io_to_file(session, "start", resp)

    return resp
# Code example #2
def media_downloadFiles(request):
    """Return a zip of requested media files (media-sync 'downloadFiles').

    Files are stored under numeric archive names; the "_meta" entry maps
    those numbers back to the real filenames. Adding stops once the
    accumulated size or file count passes the caps below (the file that
    crosses the size cap is still included, as in the original behavior).
    """
    SYNC_ZIP_SIZE = int(2.5 * 1024 * 1024)  # soft cap on payload bytes
    SYNC_ZIP_COUNT = 25                     # soft cap on file count

    session = safe_get_session(request)
    data = get_data(request)

    dump_io_to_file(session, "downloadFiles", request, is_media=True)

    with get_collection(session) as col:
        files = data["files"]
        flist = {}
        cnt = 0
        sz = 0
        f = io.BytesIO()

        with zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED) as z:
            # Hoisted: media_dir is loop-invariant, and the per-file path was
            # previously built twice per iteration (for write and getsize).
            media_dir = col.media_dir()
            for fname in files:
                path = os.path.join(media_dir, fname)
                z.write(path, str(cnt))
                flist[str(cnt)] = fname
                sz += os.path.getsize(path)
                if sz > SYNC_ZIP_SIZE or cnt > SYNC_ZIP_COUNT:
                    break
                cnt += 1

            z.writestr("_meta", json.dumps(flist))

        resp = HttpResponse(f.getvalue())
    dump_io_to_file(session, "downloadFiles", resp, is_media=True)

    return resp
# Code example #3
def base_upload(request):
    """Replace the user's collection with the uploaded database ('upload')."""
    session = safe_get_session(request)

    dump_io_to_file(session, "upload", request)

    # The raw database bytes arrive in the request payload's "data" field.
    db_bytes = get_data(request)["data"]
    result = full_upload(db_bytes, session["name"])

    resp = HttpResponse(result)
    dump_io_to_file(session, "upload", resp)

    return resp
# Code example #4
def base_sanityCheck2(request):
    """Run the post-sync sanity check ('sanityCheck2') and return its result."""
    session = safe_get_session(request)
    payload = get_data(request)

    dump_io_to_file(session, "sanityCheck2", request)

    with get_collection(session) as col:
        handler = SyncCollectionHandler(col, session=session)
        resp = JsonResponse(handler.sanityCheck2(client=payload.get("client")))
    dump_io_to_file(session, "sanityCheck2", resp)

    return resp
# Code example #5
def media_mediaSanity(request):
    """Media-sync sanity check: compare the client's local file count to ours."""
    session = safe_get_session(request)

    dump_io_to_file(session, "mediaSanity", request, is_media=True)

    with get_collection(session) as col:
        # "OK" only when the server's media count matches the client's.
        counts_match = col.media_count() == get_data(request)["local"]
        resp = JsonResponse({"data": "OK" if counts_match else "FAILED", "err": ""})

    dump_io_to_file(session, "mediaSanity", resp, is_media=True)

    return resp
# Code example #6
def base_applyGraves(request):
    """Apply a chunk of client-side deletions ('applyGraves')."""
    session = safe_get_session(request)
    payload = get_data(request)

    dump_io_to_file(session, "applyGraves", request)

    with get_collection(session) as col:
        handler = SyncCollectionHandler(col, session=session)
        handler.applyGraves(chunk=payload.get("chunk"))

        # An empty JSON object signals success to the client.
        resp = JsonResponse({})
    dump_io_to_file(session, "applyGraves", resp)

    return resp
# Code example #7
def media_mediaChanges(request):
    """Report media changes since the client's last known USN ('mediaChanges')."""
    session = safe_get_session(request)

    dump_io_to_file(session, "mediaChanges", request, is_media=True)

    with get_collection(session) as col:
        last_usn = get_data(request)["lastUsn"]
        changes = col.media_changes(last_usn)
        resp = JsonResponse({"data": changes, "err": ""})
    dump_io_to_file(session, "mediaChanges", resp, is_media=True)

    return resp
# Code example #8
def base_hostKey(request):
    """Authenticate the user and hand back a session key ('hostKey')."""
    data = get_data(request)

    print_request(request)

    username = data.get("u")
    password = data.get("p")

    # Reject bad credentials outright.
    if not authenticate(username=username, password=password):
        raise PermissionDenied

    # Create a fresh server-side session, keyed by its own session key.
    store = SessionStore()
    store.create()
    store["skey"] = store.session_key
    store["name"] = username
    store.save()

    # NOTE(review): "hostNum" and "host_number" carry different constants
    # (1 vs 2) — presumably for different client versions; confirm.
    return JsonResponse({"key": store.session_key, "hostNum": 1, "host_number": 2})
# Code example #9
def base_applyChanges(request):
    """Apply the client's changes to the collection ('applyChanges')."""
    session = safe_get_session(request)
    payload = get_data(request)

    dump_io_to_file(session, "applyChanges", request)

    with get_collection(session) as col:
        handler = SyncCollectionHandler(col, session=session)

        # A chunked variant (storing handler.tablesLeft in the session and
        # caching it under session.skey) was tried here previously; changes
        # are currently applied in a single call instead.
        output = handler.applyChanges(changes=payload.get("changes"))
        resp = JsonResponse(output)

    dump_io_to_file(session, "applyChanges", resp)
    return resp
# Code example #10
def media_uploadChanges(request):
    """Accept a zip of client media changes ('uploadChanges').

    The zip file contains files the client hasn't synced with the server
    yet ('dirty'), and info on files it has deleted from its own media dir.
    """
    session = safe_get_session(request)

    dump_io_to_file(session, "uploadChanges", request, is_media=True)

    with get_collection(session) as col:
        payload = get_data(request)["data"]
        with zipfile.ZipFile(io.BytesIO(payload), "r") as zf:
            col.check_zip_data(zf)
            processed_count = col.adopt_media_changes_from_zip(zf)

        result = [processed_count, col.last_media_usn()]
        resp = JsonResponse({"data": result, "err": ""})
    dump_io_to_file(session, "uploadChanges", resp, is_media=True)

    return resp