예제 #1
0
    def handle_async(self, *args, **options):
        """Download the sqlite3 content database for ``channel_id``.

        Streams the remote file to the local content database directory in
        1 KiB chunks, reporting progress per chunk, then refreshes the
        channel metadata cache.

        Raises:
            requests.HTTPError: if the download URL returns an error status.
        """
        channel_id = options["channel_id"]
        logging.info("Downloading data for channel id {}".format(channel_id))

        # Build the URL with explicit "/" separators: os.path.join uses the
        # OS path separator (backslash on Windows), which is wrong for URLs.
        url = "/".join([
            settings.CENTRAL_CONTENT_DOWNLOAD_DOMAIN.rstrip("/"),
            "content",
            "databases",
            "{}.sqlite3".format(channel_id),
        ])

        dest = os.path.join(
            settings.CONTENT_DATABASE_DIR,
            "{}.sqlite3".format(channel_id),
        )

        logging.debug("URL to fetch: {}".format(url))
        logging.debug("Destination: {}".format(dest))

        r = requests.get(url, stream=True)
        r.raise_for_status()

        # Some servers omit Content-Length; fall back to 0 rather than
        # raising KeyError so the download itself can still proceed.
        dbsize = int(r.headers.get('content-length', 0))

        with self.start_progress(total=dbsize) as progress_update:
            with open(dest, "wb") as f:
                for content in r.iter_content(1024):
                    f.write(content)
                    progress_update(len(content))

        update_channel_metadata_cache()
예제 #2
0
    def handle_async(self, *args, **options):
        """Download the sqlite3 content database for ``channel_id``.

        Streams the remote file to the local content database directory in
        1 KiB chunks, reporting progress per chunk, then refreshes the
        channel metadata cache.

        Raises:
            requests.HTTPError: if the download URL returns an error status.
        """
        channel_id = options["channel_id"]
        logging.info("Downloading data for channel id {}".format(channel_id))

        # Build the URL with explicit "/" separators: os.path.join uses the
        # OS path separator (backslash on Windows), which is wrong for URLs.
        url = "/".join([
            settings.CENTRAL_CONTENT_DOWNLOAD_DOMAIN.rstrip("/"),
            "content",
            "databases",
            "{}.sqlite3".format(channel_id),
        ])

        dest = os.path.join(
            settings.CONTENT_DATABASE_DIR,
            "{}.sqlite3".format(channel_id),
        )

        logging.debug("URL to fetch: {}".format(url))
        logging.debug("Destination: {}".format(dest))

        r = requests.get(url, stream=True)
        r.raise_for_status()

        # Some servers omit Content-Length; fall back to 0 rather than
        # raising KeyError so the download itself can still proceed.
        dbsize = int(r.headers.get('content-length', 0))

        with self.start_progress(total=dbsize) as progress_update:
            with open(dest, "wb") as f:
                for content in r.iter_content(1024):
                    f.write(content)
                    progress_update(len(content))

        update_channel_metadata_cache()
예제 #3
0
    def handle(self, *args, **options):
        """Spawn the requested dev processes (webpack, karma), refresh the
        channel metadata cache, then delegate to the parent command."""
        wants_webpack = options["webpack"]
        wants_karma = options["karma"]

        if wants_webpack:
            self.spawn_webpack(lint=options["lint"])
        if wants_karma:
            self.spawn_karma()

        update_channel_metadata_cache()

        return super(Command, self).handle(*args, **options)
예제 #4
0
파일: server.py 프로젝트: rayykim/kolibri
def start():
    """Run the setup management commands, refresh the channel metadata
    cache, and launch the server."""
    # TODO(aronasorman): move to install/plugin-enabling scripts, and remove from here
    for management_command in ("collectstatic", "collectstatic_js_reverse", "migrate"):
        call_command(management_command, interactive=False)

    update_channel_metadata_cache()

    run_server()
예제 #5
0
    def handle(self, *args, **options):
        """Spawn the requested dev processes, refresh the channel metadata
        cache, and hand control to the parent command."""
        for flag, spawner in (("webpack", self.spawn_webpack),
                              ("karma", self.spawn_karma)):
            if options[flag]:
                spawner()

        update_channel_metadata_cache()

        return super(Command, self).handle(*args, **options)
예제 #6
0
def start():
    """Collect static assets, migrate both databases, refresh the channel
    metadata cache, and run the server."""
    # TODO(aronasorman): move to install/plugin-enabling scripts, and remove from here
    call_command("collectstatic", interactive=False)
    call_command("collectstatic_js_reverse", interactive=False)
    for db_alias in ("default", "ormq"):
        call_command("migrate", interactive=False, database=db_alias)

    update_channel_metadata_cache()

    run_server()
예제 #7
0
파일: server.py 프로젝트: jamalex/kolibri
def start():
    """Run setup management commands, start the background task cluster,
    refresh the channel metadata cache, and launch the server."""
    # TODO(aronasorman): move to install/plugin-enabling scripts, and remove from here
    call_command("collectstatic", interactive=False)
    call_command("collectstatic_js_reverse", interactive=False)
    for db_alias in ("default", "ormq"):
        call_command("migrate", interactive=False, database=db_alias)

    # start the qcluster process
    start_background_workers()

    update_channel_metadata_cache()

    run_server()
예제 #8
0
def start():
    """Run setup management commands, start background workers (skipped on
    Windows), refresh the channel metadata cache, and launch the server."""
    # TODO(aronasorman): move to install/plugin-enabling scripts, and remove from here
    for command_name in ("collectstatic", "collectstatic_js_reverse"):
        call_command(command_name, interactive=False)
    for db_alias in ("default", "ormq"):
        call_command("migrate", interactive=False, database=db_alias)

    # start the qcluster process
    # don't run on windows; we don't run a full cluster there.
    is_windows = platform.system() == "Windows"
    if not is_windows:
        start_background_workers()

    update_channel_metadata_cache()

    run_server()
예제 #9
0
    def handle(self, *args, **options):
        """Spawn the requested dev processes (webpack, karma, qcluster),
        refresh the channel metadata cache, migrate the ormq database, and
        delegate to the parent command."""
        if options["webpack"]:
            self.spawn_webpack(lint=options["lint"])

        if options["karma"]:
            self.spawn_karma()

        on_windows = platform.system() == "Windows"
        if options["qcluster"] and not on_windows:
            self.spawn_qcluster()

        update_channel_metadata_cache()

        # migrate the ormq DB before starting.
        # NOTE(review): qcluster is spawned above, before this migration
        # runs — confirm that ordering is intentional.
        call_command("migrate", interactive=False, database="ormq")

        return super(Command, self).handle(*args, **options)
예제 #10
0
    def handle(self, *args, **options):
        """Spawn the requested dev processes, refresh the channel metadata
        cache, migrate the ormq database, and hand off to the parent
        command."""
        if options["webpack"]:
            self.spawn_webpack(lint=options["lint"])
        if options["karma"]:
            self.spawn_karma()

        run_qcluster = options["qcluster"] and platform.system() != "Windows"
        if run_qcluster:
            self.spawn_qcluster()

        update_channel_metadata_cache()

        # migrate the ormq DB before starting.
        call_command("migrate", interactive=False, database="ormq")

        return super(Command, self).handle(*args, **options)
예제 #11
0
    def handle_async(self, *args, **options):
        """Download the content database for ``channel_id`` via the
        ``transfer`` helper, reporting per-chunk progress, then refresh the
        channel metadata cache."""
        channel_id = options["channel_id"]
        logging.info("Downloading data for channel id {}".format(channel_id))

        url = paths.get_content_database_file_url(channel_id)
        dest = paths.get_content_database_file_path(channel_id)

        logging.debug("URL to fetch: {}".format(url))
        logging.debug("Destination: {}".format(dest))

        with transfer.FileDownload(url, dest) as download:
            with self.start_progress(total=download.total_size) as progress_update:
                for chunk in download:
                    progress_update(len(chunk))

        update_channel_metadata_cache()
예제 #12
0
파일: server.py 프로젝트: navakk9/kolibri
def start(port=None):
    """Run setup management commands, start background workers (skipped on
    Windows), refresh the channel metadata cache, and serve.

    :param port: TCP port to serve on; defaults to 8080 when ``None``.
    """
    # `port or 8080` would also coerce an explicit port 0 ("pick any free
    # port") to 8080; test for None instead.
    server_port = 8080 if port is None else port

    # TODO(aronasorman): move to install/plugin-enabling scripts, and remove from here
    call_command("collectstatic", interactive=False)
    call_command("collectstatic_js_reverse", interactive=False)
    call_command("migrate", interactive=False, database="default")
    call_command("migrate", interactive=False, database="ormq")

    # start the qcluster process
    # don't run on windows; we don't run a full cluster there.
    if platform.system() != "Windows":
        start_background_workers()

    from kolibri.content.utils.annotation import update_channel_metadata_cache
    update_channel_metadata_cache()

    run_server(port=server_port)
예제 #13
0
파일: server.py 프로젝트: yujinyuz/kolibri
def start(port=8080):
    """
    Starts the server.

    :param: port: Port number (default: 8080)
    """

    # Write the new PID
    with open(PID_FILE, 'w') as f:
        f.write("%d\n%d" % (os.getpid(), port))

    # This should be run every time the server is started for now.
    # Events to trigger it are hard, because of copying a content folder into
    # ~/.kolibri, or deleting a channel DB on disk
    from kolibri.content.utils.annotation import update_channel_metadata_cache
    update_channel_metadata_cache()

    def rm_pid_file():
        # Best-effort cleanup: the PID file may already have been removed
        # (e.g. by an external stop script); don't raise during interpreter
        # shutdown.
        try:
            os.unlink(PID_FILE)
        except OSError:
            pass

    atexit.register(rm_pid_file)

    run_server(port=port)
예제 #14
0
def test_annotation():
    """Smoke test: rebuilding the channel metadata cache must not raise."""
    update_channel_metadata_cache()