Example #1
import tempfile

from django.conf import settings
from django.contrib.auth import get_user_model
from django.http import (HttpResponse, HttpResponseBadRequest,
                         HttpResponseNotAllowed)
from django.template.defaultfilters import filesizeformat

# project-specific helpers (ConfigEntity, create_upload_task,
# UploadProgressTaskHandler, process_file) are assumed to be
# importable from the host project

def upload(request):
    """Handle upload POST calls from UF."""

    if request.method != 'POST':
        # HttpResponseNotAllowed takes the list of permitted methods,
        # which populates the response's Allow header
        return HttpResponseNotAllowed(['POST'])

    api_key = request.GET.get('api_key')
    username = request.GET.get('username')
    config_entity_id = request.GET.get('config_entity__id')
    file_name = request.GET.get('file_name')
    progress_id = request.GET.get('X-Progress-ID')

    user = get_user_model().objects.get(api_key__key=api_key,
                                        username=username)
    config_entity = ConfigEntity.objects.filter(
        id=config_entity_id).get_subclass()

    upload_task = create_upload_task(
        user,
        file_name,
        config_entity,
        extra_data_dict={'X-Progress-ID': progress_id})

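    # report upload progress back to the task while Django parses the
    # request body; inserting the handler at index 0 runs it ahead of
    # Django's default upload handlers, and max_progress=40 presumably
    # caps this stage's contribution to the overall task progress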
    upload_handler = UploadProgressTaskHandler(max_progress=40,
                                               upload_task=upload_task)
    request.upload_handlers.insert(0, upload_handler)

    if request.FILES['files[]'].size > settings.MAX_UPLOAD_SIZE:
        return HttpResponseBadRequest(
            "The maximum file size for uploads is {} but the file you selected is {}"
            .format(filesizeformat(settings.MAX_UPLOAD_SIZE),
                    filesizeformat(request.FILES['files[]'].size)))

    # send the initial progress of 0%
    upload_task.send_progress()

    with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
        for chunk in request.FILES['files[]'].chunks():
            tmpfile.write(chunk)

    try:
        process_file(tmpfile.name,
                     should_create_db_entity=True,
                     user=user,
                     config_entity=config_entity,
                     upload_task=upload_task)
    except Exception:
        upload_task.send_error()
        raise

    return HttpResponse()
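
`UploadProgressTaskHandler` is project-specific, but it plugs into Django's
standard file-upload-handler hooks. Below is a minimal, hypothetical sketch of
such a handler, assuming the task object exposes the same
`progress`/`send_progress()` API used in the view; it illustrates the
mechanism, not the project's actual implementation.

from django.core.files.uploadhandler import FileUploadHandler


class ProgressReportingUploadHandler(FileUploadHandler):
    """Hypothetical handler that maps bytes received to task progress."""

    def __init__(self, upload_task, max_progress=40, *args, **kwargs):
        super(ProgressReportingUploadHandler, self).__init__(*args, **kwargs)
        self.upload_task = upload_task
        self.max_progress = max_progress
        self.bytes_received = 0
        self.total_length = None

    def handle_raw_input(self, input_data, META, content_length,
                         boundary, encoding=None):
        # remember the total request size so chunks can be converted to
        # a percentage; returning None lets parsing proceed normally
        self.total_length = content_length

    def receive_data_chunk(self, raw_data, start):
        self.bytes_received += len(raw_data)
        if self.total_length:
            fraction = float(self.bytes_received) / self.total_length
            self.upload_task.progress = int(fraction * self.max_progress)
            self.upload_task.send_progress()
        # pass the chunk through unchanged so the handlers further down
        # the chain still assemble the uploaded file
        return raw_data

    def file_complete(self, file_size):
        # returning None defers file construction to the next handler
        return None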
Example #2
from zipfile import ZipFile

from django.utils import timezone

# project-specific models and helpers (DbEntity, UploadDatasetTask,
# UploadTask, Category, DbEntityCategoryKey, create_upload_task,
# get_schema_metadata, send_message_to_client, logger) are assumed to
# be importable from the host project

def create_db_entity(pg_dump_fpath, db_entity_key, db_entity_name, table_name,
                     layer_count, user, config_entity, **kwargs):
    """
    Create a DbEntity and all associated layers, etc. The majority of the
    processing occurs in the post-save methods on DbEntity objects, this method
    simply gets objects in the necessary state to trigger it.
    """

    logger.debug("Creating DbEntity %s with pg_dump file %s", db_entity_key,
                 pg_dump_fpath)

    if 'upload_task' in kwargs:
        upload_task = kwargs['upload_task']
    else:
        # no task was passed in: we're being called from the
        # command line for testing purposes
        upload_task = create_upload_task(
            user,
            pg_dump_fpath,
            config_entity,
            extra_data_dict={'X-Progress-ID': 'unused'})

    # later post-save processing expects a zipped SQL file
    zipped_sql_fpath = "{}.zip".format(pg_dump_fpath)
    with ZipFile(zipped_sql_fpath, 'w') as zipped_sql:
        zipped_sql.write(pg_dump_fpath)

    # an UploadDatasetTask represents the processing of a single file
    upload_dataset_task = UploadDatasetTask.objects.create(
        upload_task=upload_task,
        dataset_id=-1,
        file_path=zipped_sql_fpath,
        filename=db_entity_key,
        progress=upload_task.progress,
        status=upload_task.status,
        extra=upload_task.extra)

    # the schema metadata carries the information Django needs to create
    # new data models from the upload; the DbEntity post-save logic
    # uses it
    schema_metadata = get_schema_metadata(pg_dump_fpath, table_name)
    upload_dataset_task.metadata = schema_metadata
    logger.debug("Saving DbEntity %s and inititialzing post-save processing.",
                 db_entity_key)
    upload_dataset_task.save()

    db_entity = DbEntity(creator=user,
                         updater=user,
                         name=db_entity_name,
                         key=db_entity_key,
                         url='file://{}'.format(zipped_sql_fpath),
                         setup_percent_complete=0,
                         schema=config_entity.schema())

    # setting `_config_entity` and then calling `save()` triggers
    # the post-save processing flow, which, among other things,
    # loads the data into the database, creates layers, and updates
    # other model objects so they are aware of the new layer
    db_entity._config_entity = config_entity
    db_entity.save()

    db_entity.categories.add(
        Category.objects.get(key=DbEntityCategoryKey.KEY_CLASSIFICATION,
                             value=DbEntityCategoryKey.REFERENCE))

    upload_dataset_task.progress = 100
    upload_dataset_task.status = UploadDatasetTask.SUCCESS
    upload_dataset_task.ended_on = timezone.now()
    upload_dataset_task.save()
    upload_dataset_task.send_progress()

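    # once every per-file dataset task has succeeded, mark the parent
    # upload task itself as complete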
    finished_dataset_count = UploadDatasetTask.objects.filter(
        upload_task=upload_task, status=UploadDatasetTask.SUCCESS).count()
    if finished_dataset_count == layer_count:
        upload_task.progress = 100
        upload_task.status = UploadTask.SUCCESS
        upload_task.ended_on = timezone.now()
        upload_task.save()
        upload_task.send_progress()

    message_kwargs = dict(event="doCreateDbEntity",
                          id=db_entity.id,
                          name=db_entity_name,
                          key=db_entity_key,
                          config_entity=config_entity.id,
                          file_dataset=upload_dataset_task.id)

    # send websockets `doCreateDbEntity` signal to the browser
    send_message_to_client(user.id, message_kwargs)
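
As the comments above note, most of the real work happens in the project's
DbEntity post-save flow; this function only puts objects into the state that
triggers it. A rough sketch of that pattern is shown below, where
`run_post_save_import` is a hypothetical entry point standing in for the
project's (considerably more involved) post-save logic.

from django.db.models.signals import post_save
from django.dispatch import receiver


@receiver(post_save, sender=DbEntity)  # DbEntity as used above
def trigger_db_entity_processing(sender, instance, created, **kwargs):
    # the calling code attaches `_config_entity` before save(); its
    # presence is what opts the instance into import processing
    config_entity = getattr(instance, '_config_entity', None)
    if created and config_entity is not None:
        # hypothetical: load the uploaded data, create layers, and
        # update dependent model objects
        run_post_save_import(instance, config_entity)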