Example #1
def save_history(request):
    """
    Save a copy of all ExtraAttrValue (labels, notes, ...) in a HistoryEntry
    :param request: must specify a comment to store with this copy
    :return: the row (dict) representing the newly created HistoryEntry
    :version: 2.0.0
    """
    version = 4
    user = request.user

    comment = get_or_error(request.POST, 'comment')
    database_id = get_or_error(request.POST, 'database')
    backup_type = get_or_error(request.POST, 'type')

    database = get_or_error(Database, dict(id=database_id))
    assert_permission(user, database, DatabasePermission.VIEW)
    assert_values(backup_type, ['labels', 'segmentation'])

    meta = dict(database=database_id,
                user=user.id,
                time=timezone.now(),
                version=version,
                note=comment,
                type=backup_type)

    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_BZIP2,
                         False) as zip_file:
        # default=str lets json.dumps handle the datetime stored in meta
        zip_file.writestr('meta.json', json.dumps(meta, default=str))
        zip_file.writestr('root.extraattrvalue.json',
                          'here for checking purpose')

        if backup_type == 'labels':
            save_label_history(database, user, zip_file)
        else:
            save_segmentation_history(database, user, zip_file)

    binary_content = zip_buffer.getvalue()

    he = HistoryEntry.objects.create(user=user,
                                     time=timezone.now(),
                                     database=database,
                                     version=version,
                                     note=comment,
                                     type=backup_type)
    filename = he.filename
    filepath = history_path(filename)
    ensure_parent_folder_exists(filepath)

    with open(filepath, 'wb') as f:
        f.write(binary_content)

    tz_offset = request.session['detected_tz']
    tz = offset_to_timezone(tz_offset)

    _, rows = bulk_get_history_entries([he],
                                       DotMap(user=user,
                                              database=database_id,
                                              tz=tz))
    return rows[0]
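
For reference, the archive written above can be inspected with the standard library alone. A minimal sketch, assuming backup_path points at a file produced by save_history (in practice that is history_path(he.filename)):

import json
import zipfile

backup_path = 'backup.zip'  # hypothetical path; real backups live under history_path(...)

with zipfile.ZipFile(backup_path, 'r') as zf:
    # 'meta.json' and 'root.extraattrvalue.json' are always written;
    # the remaining entries depend on the backup type (labels or segmentation)
    print(zf.namelist())
    meta = json.loads(zf.read('meta.json'))
    print(meta['version'], meta['type'], meta['note'])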
Example #2
def bulk_get_history_entries(hes, extras):
    user = extras.user
    database = extras.database

    tz = extras.tz
    if isinstance(hes, QuerySet):
        values = hes.filter(database=database).values_list(
            'id', 'filename', 'time', 'user__username', 'user__id', 'database',
            'database__name', 'note', 'version', 'type')
    else:
        values = [(x.id, x.filename, x.time, x.user.username, x.user.id,
                   x.database, x.database.name, x.note, x.version, x.type)
                  for x in hes]

    ids = []
    rows = []
    for id, filename, time, creator, creator_id, database_id, database_name, note, version, type in values:
        ids.append(id)
        tztime = time.astimezone(tz)

        user_is_creator = user.id == creator_id
        can_import = user_is_creator or has_import_permission(
            user.id, database_id)

        if can_import:
            url_path = history_path(filename, for_url=True)
            local_file_path = url_path[1:]
            if os.path.isfile(local_file_path):
                file_size = os.path.getsize(local_file_path) / 1024
                url = '[{}]({})'.format(url_path, filename)
            else:
                url = 'File is missing'
                file_size = 0
                can_import = False
        else:
            file_size = 0
            url = 'Insufficient permission to download'

        row = dict(id=id,
                   url=url,
                   creator=creator,
                   time=tztime,
                   size=file_size,
                   database=database_name,
                   note=note,
                   version=version,
                   __can_import=can_import,
                   __can_delete=user_is_creator,
                   type=type)

        rows.append(row)

    return ids, rows
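
bulk_get_history_entries accepts either a QuerySet or a plain list of HistoryEntry objects. A minimal usage sketch, assuming user, database_id and tz are already resolved as in save_history above (DotMap comes from the dotmap package):

from dotmap import DotMap

entries = HistoryEntry.objects.filter(database=database_id)
ids, rows = bulk_get_history_entries(entries,
                                     DotMap(user=user,
                                            database=database_id,
                                            tz=tz))
# rows is a list of display-ready dicts; ids lists the matching HistoryEntry primary keys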
Example #3
def _history_delete(sender, instance, **kwargs):
    """
    When a HistoryEntry is deleted, also delete its ZIP file
    :param sender:
    :param instance:
    :param kwargs:
    :return:
    """
    filepath = history_path(instance.filename)
    print('Delete {}'.format(filepath))
    if os.path.isfile(filepath):
        os.remove(filepath)
    else:
        warning('File {} does not exist.'.format(filepath))
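
_history_delete is written as a signal handler; it only runs if it is connected to HistoryEntry's post_delete signal. A minimal sketch of how that wiring could look (the actual registration point in the project may differ):

from django.db.models.signals import post_delete

# remove the backing ZIP file whenever a HistoryEntry row is deleted
post_delete.connect(_history_delete, sender=HistoryEntry)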
Example #4
def import_history(request):
    """
    Import a HistoryEntry from any user to this user.
    If this operation fails, the database is left intact.
    :param request: must specify either version-id (the id of the HistoryEntry object to import)
                    or FILES['zipfile'] (a zip file previously created by Koe)
    :return: a response dict with success=True and the import result as its payload
    """
    version_id = request.POST.get('version-id', None)
    zip_file = request.FILES.get('zipfile', None)
    user = request.user

    current_database = get_user_databases(user)
    if current_database is None:
        raise CustomAssertionError(
            'You don\'t have a current working database')

    assert_permission(user, current_database, DatabasePermission.ANNOTATE)

    if not (version_id or zip_file):
        raise CustomAssertionError('No ID or file provided. Abort.')

    if version_id:
        he = HistoryEntry.objects.get(id=version_id)
        file = open(history_path(he.filename), 'rb')
    else:
        file = File(file=zip_file)

    filelist = {}
    with zipfile.ZipFile(file, "r") as zip_archive:
        # read every entry into memory so the source file can be closed immediately
        for name in zip_archive.namelist():
            filelist[name] = zip_archive.read(name)
    file.close()

    meta = json.loads(get_or_error(filelist, 'meta.json'))
    version = get_or_error(meta, 'version')
    backup_type = get_or_error(meta, 'type')

    if version < 4:
        raise CustomAssertionError(
            'This file format is too old and not supported anymore.')

    if backup_type == 'segmentation':
        retval = import_history_with_segmentation(current_database, user,
                                                  filelist)
        return dict(origin='import_history',
                    success=True,
                    warning=None,
                    payload=retval)

    try:
        contents = [
            get_or_error(filelist, 'segment.extraattrvalue.json'),
            get_or_error(filelist, 'audiofile.extraattrvalue.json')
        ]
        extra_attrs = json.loads(get_or_error(filelist, 'extraattr.json'))
        new_entries = []
        for content in contents:
            loaded = json.loads(content)
            new_entries += loaded
    except Exception:
        raise CustomAssertionError(
            'The history content is malformed and cannot be parsed.')

    new_entries = change_owner_and_attr_ids(new_entries, extra_attrs)

    retval = update_extra_attr_values(user, new_entries)
    return dict(origin='import_history',
                success=True,
                warning=None,
                payload=retval)
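
For a 'labels' import, the code above expects the archive to contain at least meta.json (with version >= 4 and a type), extraattr.json, segment.extraattrvalue.json and audiofile.extraattrvalue.json. A minimal sketch of building such an archive with the standard library; the JSON payloads are placeholders, since their exact schema is defined by save_label_history and change_owner_and_attr_ids rather than shown here:

import io
import json
import zipfile

buffer = io.BytesIO()
with zipfile.ZipFile(buffer, 'w', zipfile.ZIP_BZIP2) as zf:
    zf.writestr('meta.json', json.dumps(dict(version=4, type='labels')))
    zf.writestr('extraattr.json', json.dumps([]))                 # placeholder payload
    zf.writestr('segment.extraattrvalue.json', json.dumps([]))    # placeholder payload
    zf.writestr('audiofile.extraattrvalue.json', json.dumps([]))  # placeholder payload

# the resulting buffer could then be uploaded as FILES['zipfile'] to import_history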