Example 1
def _localexport(channel_id, drive_id, update_progress=None, check_for_cancel=None, node_ids=None, exclude_node_ids=None, extra_metadata=None):
    drives = get_mounted_drives_with_channel_info()
    drive = drives[drive_id]

    call_command(
        "exportchannel",
        channel_id,
        drive.datafolder,
        update_progress=update_progress,
        check_for_cancel=check_for_cancel)
    try:
        call_command(
            "exportcontent",
            channel_id,
            drive.datafolder,
            node_ids=node_ids,
            exclude_node_ids=exclude_node_ids,
            update_progress=update_progress,
            check_for_cancel=check_for_cancel)
    except UserCancelledError:
        try:
            os.remove(get_content_database_file_path(channel_id, datafolder=drive.datafolder))
        except OSError:
            pass
        raise
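These task functions all thread the same two callbacks through call_command: update_progress reports completed work, and check_for_cancel gives the long-running command a cooperative cancellation point by raising UserCancelledError. A minimal, self-contained sketch of how such callbacks might be wired up (the Job class and make_callbacks are hypothetical stand-ins, not Kolibri's real job API):

# Hypothetical sketch: only UserCancelledError mirrors a name used in
# these examples; Job and make_callbacks are illustrative stand-ins.
from dataclasses import dataclass

class UserCancelledError(Exception):
    pass

@dataclass
class Job:
    progress: float = 0.0
    cancel_requested: bool = False

def make_callbacks(job):
    def update_progress(increment, extra_data=None):
        job.progress += increment  # accumulate reported work

    def check_for_cancel():
        # Raising here unwinds call_command, which triggers the cleanup
        # blocks seen in these examples.
        if job.cancel_requested:
            raise UserCancelledError()

    return update_progress, check_for_cancel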
Example 2
def _localexport(drive_id, update_progress=None, check_for_cancel=None):
    drives = get_mounted_drives_with_channel_info()
    drive = drives[drive_id]
    for channel in ChannelMetadataCache.objects.all():
        call_command("exportchannel",
                     channel.id,
                     drive.datafolder,
                     update_progress=update_progress,
                     check_for_cancel=check_for_cancel)
        try:
            call_command("exportcontent",
                         channel.id,
                         drive.datafolder,
                         update_progress=update_progress,
                         check_for_cancel=check_for_cancel)
        except UserCancelledError:
            try:
                os.remove(
                    get_content_database_file_path(
                        channel.id, datafolder=drive.datafolder))
            except OSError:
                pass
            connections.close_all()  # close all DB connections (FIX for #1818)
            raise
    connections.close_all()  # close all DB connections (FIX for #1818)
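The connections.close_all() calls are Django's: these functions run as background tasks outside the request/response cycle, so each worker closes its database connections explicitly, both on the cancellation path and on normal completion (the "#1818" in the comments refers to the Kolibri issue this fixed). A sketch of the shape of that pattern, assuming a configured Django project; do_export is a hypothetical placeholder:

# Pattern sketch, assuming a configured Django project. Only
# django.db.connections is a real API; do_export is a placeholder.
from django.db import connections

def run_export_task(do_export):
    try:
        do_export()
    except Exception:
        connections.close_all()  # don't leave the worker's connections dangling
        raise
    connections.close_all()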
Example 3
def _localimport(drive_id, update_progress=None, check_for_cancel=None):
    drives = get_mounted_drives_with_channel_info()
    drive = drives[drive_id]
    # copy each channel's db file, then copy all its content files from the storage dir
    try:
        for channel in drive.metadata["channels"]:
            call_command("importchannel",
                         "local",
                         channel["id"],
                         drive.datafolder,
                         update_progress=update_progress,
                         check_for_cancel=check_for_cancel)
            call_command("importcontent",
                         "local",
                         channel["id"],
                         drive.datafolder,
                         update_progress=update_progress,
                         check_for_cancel=check_for_cancel)
    except UserCancelledError:
        connections.close_all()  # close all DB connections (FIX for #1818)
        for channel in drive.metadata["channels"]:
            channel_id = channel["id"]
            try:
                os.remove(get_content_database_file_path(channel_id))
            except OSError:
                pass
            ChannelMetadataCache.objects.filter(id=channel_id).delete()
        connections.close_all()  # close all DB connections (FIX for #1818)
        raise
    connections.close_all()  # close all DB connections (FIX for #1818)
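On cancellation, _localimport rolls the device back: for every channel on the drive it removes the partially copied database file and deletes the cached metadata row, leaving things as if the import had never started. A self-contained sketch of that rollback shape, with remove_db_file and forget_channel as hypothetical stand-ins for the os.remove(...) and ChannelMetadataCache calls above:

# Self-contained rollback sketch; both callables are hypothetical
# stand-ins for the real cleanup steps shown above.
def rollback_import(channel_ids, remove_db_file, forget_channel):
    for channel_id in channel_ids:
        try:
            remove_db_file(channel_id)
        except OSError:
            pass  # the db file may never have been copied
        forget_channel(channel_id)  # drop the cached metadata row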
Example 4
    def handle_async(self, *args, **options):
        channel_id = options["channel_id"]

        try:
            channel = ChannelMetadata.objects.get(pk=channel_id)
        except ChannelMetadata.DoesNotExist:
            raise CommandError(
                "Channel matching id {id} does not exist".format(
                    id=channel_id))

        logging.info("Deleting all channel metadata")
        channel.delete_content_tree_and_files()

        # Count the orphan files that will actually be deleted from disk
        total_file_deletion_operations = (
            LocalFile.objects.get_orphan_files().filter(available=True).count()
        )

        total_local_files_to_delete = LocalFile.objects.get_orphan_files().count()

        progress_extra_data = {
            "channel_id": channel_id,
        }

        with self.start_progress(
            total=total_file_deletion_operations + total_local_files_to_delete + 1
        ) as progress_update:
            logging.info("Deleting all orphan files")

            for file in LocalFile.objects.delete_orphan_files():
                if file.available:
                    progress_update(1, progress_extra_data)

            LocalFile.objects.delete_orphan_file_objects()

            progress_update(total_local_files_to_delete, progress_extra_data)

            try:
                os.remove(get_content_database_file_path(channel_id))
            except OSError:
                pass

            progress_update(1, progress_extra_data)
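The start_progress context manager used here comes from Kolibri's async command machinery; the example treats it as given. A hypothetical, self-contained sketch of the interface the command relies on, a context manager yielding a progress_update(increment, extra_data) callable:

# Hypothetical sketch of a start_progress-style helper; Kolibri's real
# mixin differs, this only illustrates the interface used above.
from contextlib import contextmanager

@contextmanager
def start_progress(total):
    state = {"done": 0}

    def progress_update(increment, extra_data=None):
        state["done"] += increment
        print("progress: {}/{} {}".format(state["done"], total, extra_data or {}))

    yield progress_update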
Example 5
def _networkimport(channel_id, update_progress=None, check_for_cancel=None):
    call_command("importchannel",
                 "network",
                 channel_id,
                 update_progress=update_progress,
                 check_for_cancel=check_for_cancel)
    try:
        call_command("importcontent",
                     "network",
                     channel_id,
                     update_progress=update_progress,
                     check_for_cancel=check_for_cancel)
    except UserCancelledError:
        connections.close_all()  # close all DB connections (FIX for #1818)
        try:
            os.remove(get_content_database_file_path(channel_id))
        except OSError:
            pass
        ChannelMetadataCache.objects.filter(id=channel_id).delete()
        raise
    connections.close_all()  # close all DB connections (FIX for #1818)
Example 6
    def set_content_fixture(self, db_path_mock):
        _, self.content_db_path = tempfile.mkstemp(suffix='.sqlite3')
        db_path_mock.return_value = self.content_db_path
        self.content_engine = create_engine('sqlite:///' +
                                            self.content_db_path,
                                            convert_unicode=True)

        with open(SCHEMA_PATH_TEMPLATE.format(name=self.schema_name),
                  'rb') as f:
            metadata = pickle.load(f)

        data_path = DATA_PATH_TEMPLATE.format(name=self.data_name)
        with io.open(data_path, mode='r', encoding='utf-8') as f:
            data = json.load(f)

        metadata.bind = self.content_engine

        metadata.create_all()

        conn = self.content_engine.connect()

        # Write each fixture's rows into its corresponding table
        for table in metadata.sorted_tables:
            if data[table.name]:
                conn.execute(table.insert(), data[table.name])

        conn.close()

        with patch('kolibri.content.utils.sqlalchemybridge.get_engine',
                   new=self.get_engine):

            channel_metadata = read_channel_metadata_from_db_file(
                get_content_database_file_path(
                    '6199dde695db4ee4ab392222d5af1e5c'))

            # Double-check that we actually created a valid content db
            # recognized as having the expected schema version
            assert channel_metadata.inferred_schema_version == self.schema_name

            import_channel_from_local_db('6199dde695db4ee4ab392222d5af1e5c')
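The fixture unpickles a SQLAlchemy MetaData schema, binds it to a throwaway SQLite engine, creates the tables, and bulk-inserts the JSON fixture rows table by table. A self-contained sketch of the same build-a-db-from-metadata pattern, using a small inline schema instead of the pickled Kolibri one:

# Self-contained sketch of the fixture's build pattern: create tables
# from a MetaData object, then bulk-insert rows per table. The schema
# and data here are illustrative, not Kolibri's.
from sqlalchemy import Column, Integer, MetaData, Table, Text, create_engine

engine = create_engine("sqlite://")  # throwaway in-memory database
metadata = MetaData()
Table(
    "content", metadata,
    Column("id", Integer, primary_key=True),
    Column("title", Text),
)
metadata.create_all(engine)

data = {"content": [{"id": 1, "title": "demo"}]}
with engine.begin() as conn:  # commits on exit
    for table in metadata.sorted_tables:
        if data.get(table.name):
            conn.execute(table.insert(), data[table.name])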