def append(self, request, **kwargs):
    """
    Append the rows of the temporary CSV file identified by the URL kwargs
    to the existing database table for ``dataset_id``.

    POST data must include ``csv_info`` (CSV metadata JSON) and ``fields``
    (a JSON list of additional field definitions).

    :return: An HTTP 200 response carrying the table name if successful;
        an HTTP 400 response on database error.
    """
    self.is_authenticated(request)
    try:
        # pop() both reads and removes dataset_id in one step, so it is
        # not forwarded to obj_get() with the remaining kwargs.
        dataset_id = kwargs.pop('dataset_id', None)
        bundle = self.build_bundle(request=request)
        obj = self.obj_get(bundle, **self.remove_api_resource_names(kwargs))
        csv_info = json.loads(request.POST.get('csv_info'))
        additional_fields = json.loads(request.POST.get('fields'))
        row_set = prepare_csv_rows(obj.file)
        sample_row = next(row_set.sample)
        table_name = create_database_table(sample_row, dataset_id, append=True)
        add_or_update_database_fields(table_name, additional_fields)
        populate_data(table_name, row_set)
        bundle.data['table_name'] = table_name
        populate_point_data(dataset_id, csv_info)
        obj.delete()  # Temporary file has been moved to database, safe to delete
    except InternalError:
        # BUG FIX: logger.exception() requires a message argument; calling it
        # with no arguments raises TypeError inside the handler and masks the
        # original database error.
        logger.exception('Error deploying file to database.')
        raise ImmediateHttpResponse(HttpBadRequest('Error deploying file to database.'))
    return self.create_response(request, bundle)
def append(self, request, **kwargs):
    """
    Append rows from a previously uploaded temporary CSV file to the
    existing database table for the dataset named in the URL.

    POST data must include ``csv_info`` (CSV metadata JSON) and ``fields``
    (a JSON list of additional field definitions).

    :return: An HTTP 200 response carrying the table name if successful;
        a JSON error response (HTTP 400) otherwise.
    """
    self.is_authenticated(request)
    try:
        target_dataset = kwargs.pop('dataset_id', None)
        bundle = self.build_bundle(request=request)
        temporary_file = self.obj_get(
            bundle, **self.remove_api_resource_names(kwargs))

        csv_info = json.loads(request.POST.get('csv_info'))
        extra_fields = json.loads(request.POST.get('fields'))

        prepared = prepare_csv_rows(temporary_file.file, csv_info)
        table_name = create_database_table(
            prepared['row_set'],
            csv_info,
            target_dataset,
            append=True,
            additional_fields=extra_fields,
        )
        self.populate_point_data(target_dataset, csv_info)
        bundle.data['table_name'] = table_name

        # Temporary file has been moved to database, safe to delete
        temporary_file.delete()
    except Exception as error:
        logger.exception(error)
        raise ImmediateHttpResponse(HttpBadRequest(
            content=json.dumps(derive_error_response_data(error)),
            content_type='application/json',
        ))
    return self.create_response(request, bundle)
def deploy(self, request, **kwargs):
    """
    The deploy endpoint, at
    ``{tablo_server}/api/v1/temporary-files/{uuid}/{dataset_id}/deploy/``,
    deploys the file specified by {uuid} into a database table named after
    the {dataset_id}. The {dataset_id} must be unique for the instance of
    Tablo.

    With the deploy endpoint, this is the start of what Tablo considers an
    import. The data will be temporarily stored in an import table until
    the finalize endpoint for the dataset_id is called.

    POST messages to the deploy endpoint should include the following data:

    **csv_info**
        Information about the CSV file. This is generally that information
        obtained through the describe endpoint, but can be modified to send
        additional information or modify it.

    **fields**
        A list of field JSON objects in the following format:

        .. code-block:: json

            {
                "name": "field_name",
                "type": "text",
                "value": "optional value",
                "required": true
            }

        The value can be specified if the field is a constant value
        throughout the table. This can be used for adding audit information.

    :return: An empty HTTP 200 response if the deploy was successful. An
        error response if otherwise.
    """
    self.is_authenticated(request)
    try:
        # pop() both reads and removes dataset_id in one step, so it is
        # not forwarded to obj_get() with the remaining kwargs.
        dataset_id = kwargs.pop('dataset_id', None)
        bundle = self.build_bundle(request=request)
        obj = self.obj_get(bundle, **self.remove_api_resource_names(kwargs))
        csv_info = json.loads(request.POST.get('csv_info'))
        additional_fields = json.loads(request.POST.get('fields'))
        # Use separate iterator of table rows to not exhaust the main one
        optional_fields = determine_optional_fields(prepare_csv_rows(obj.file))
        row_set = prepare_csv_rows(obj.file)
        sample_row = next(row_set.sample)
        table_name = create_database_table(sample_row, dataset_id, optional_fields=optional_fields)
        populate_data(table_name, row_set)
        add_or_update_database_fields(table_name, additional_fields)
        bundle.data['table_name'] = table_name
        add_point_column(dataset_id)
        populate_point_data(dataset_id, csv_info)
        obj.delete()  # Temporary file has been moved to database, safe to delete
    except InternalError:
        # BUG FIX: logger.exception() requires a message argument; calling it
        # with no arguments raises TypeError inside the handler and masks the
        # original database error.
        logger.exception('Error deploying file to database.')
        raise ImmediateHttpResponse(HttpBadRequest('Error deploying file to database.'))
    return self.create_response(request, bundle)
def deploy(self, request, **kwargs):
    """
    The deploy endpoint, at
    ``{tablo_server}/api/v1/temporary-files/{uuid}/{dataset_id}/deploy/``,
    deploys the file specified by {uuid} into a database table named after
    the {dataset_id}. The {dataset_id} must be unique for the instance of
    Tablo.

    With the deploy endpoint, this is the start of what Tablo considers an
    import. The data will be temporarily stored in an import table until
    the finalize endpoint for the dataset_id is called.

    POST messages to the deploy endpoint should include the following data:

    **csv_info**
        Information about the CSV file. This is generally that information
        obtained through the describe endpoint, but can be modified to send
        additional information or modify it.

    **fields**
        A list of field JSON objects in the following format:

        .. code-block:: json

            {
                "name": "field_name",
                "type": "text",
                "required": true
            }

        The value can be specified if the field is a constant value
        throughout the table. This can be used for adding audit information.

    :return: An empty HTTP 200 response if the deploy was successful. An
        error response if otherwise.
    """
    self.is_authenticated(request)
    try:
        target_dataset = kwargs.pop('dataset_id', None)
        bundle = self.build_bundle(request=request)
        temporary_file = self.obj_get(
            bundle, **self.remove_api_resource_names(kwargs))

        csv_info = json.loads(request.POST.get('csv_info'))
        extra_fields = json.loads(request.POST.get('fields'))

        prepared = prepare_csv_rows(temporary_file.file, csv_info)
        table_name = create_database_table(
            prepared['row_set'],
            csv_info,
            target_dataset,
            additional_fields=extra_fields,
        )
        add_geometry_column(target_dataset)
        self.populate_point_data(target_dataset, csv_info)
        bundle.data['table_name'] = table_name

        # Temporary file has been moved to database, safe to delete
        temporary_file.delete()
    except Exception as error:
        logger.exception(error)
        raise ImmediateHttpResponse(HttpBadRequest(
            content=json.dumps(derive_error_response_data(error)),
            content_type='application/json',
        ))
    return self.create_response(request, bundle)