def post(self, datasource_name):
        """Create a new datasource for the authenticated user from an uploaded file.

        Route shape: /datasource/<string:user_schema_name>/<string:datasource_name>
        with a multipart body containing ``upload_file`` and
        ``datasource_description`` (parsed via ``NewDatasource.parser``).

        Returns:
            (dict, int): JSON payload and HTTP status.
                200 with the parsed dataset on success;
                500 on duplicate name, upload failure, dataframe conversion
                failure, or DB insert failure (status codes kept as-is for
                backward compatibility with existing callers).
        """
        user_id = get_jwt_identity()

        # Per-user schema naming convention: 'u<user_id>'.
        user_schema_name = 'u{}'.format(user_id)
        user_table_name = datasource_name

        data = NewDatasource.parser.parse_args()
        datasource_description = data['datasource_description']
        upload_file = data['upload_file']
        # rsplit handles filenames with multiple dots ("my.data.csv" -> "csv");
        # plain split('.')[1] returned the wrong segment and raised IndexError
        # when the filename had no dot at all.
        file_extension = upload_file.filename.rsplit('.', 1)[-1]

        datasource = DatasourceModel(datasource_name, datasource_description,
                                     user_schema_name, user_table_name,
                                     user_id)

        # Reject duplicates before persisting anything.
        if datasource.find_by_name(datasource_name, user_id):
            return {
                'message': ERROR_DUPLICATE_NAME.format(datasource_name)
            }, 500

        upload = UploadModel(upload_file)
        try:
            file_id = str(upload.save_file_to_db())
        except Exception:
            return {'message': ERROR_CANNOT_UPLOAD_FILE}, 500

        # Parse the stored file into both a dataframe (for DB insert) and a
        # JSON form (for the response body).
        file = File(file_id, file_extension)
        file.read_file_to_dataset()
        dataset_dataframe = file.get_dataframe()
        dataset_json = file.get_json()
        # NOTE: the original code re-constructed an identical DatasourceModel
        # here; the instance created above is reused instead.

        try:
            datasource.new_datasource(dataset_dataframe)
        except Exception:
            return {'message': ERROR_CANNOT_CONVERT_TO_DATAFRAME}, 500
        try:
            datasource.save_to_db(file_id)
        except Exception:
            return {'message': ERROR_CANNOT_INSERT_DB}, 500

        return {'message': 'success', 'dataset': dataset_json}, 200
 def post(self, datasource_name):
     """Append predicted rows to an existing datasource's backing table.

     Body (parsed via ``AddPrediction.parser``) carries ``predicted_data``,
     a JSON-serialized dataset produced by a prior prediction step.

     Returns:
         (dict, int): JSON payload and HTTP status.
             200 on success;
             404 when ``datasource_name`` does not resolve to a datasource;
             500 when the DB append fails.
     """
     data = AddPrediction.parser.parse_args()
     predicted_data = data['predicted_data']
     datasource = DatasourceModel.find_by_name(datasource_name)
     # Guard: without this, a missing datasource crashed with AttributeError
     # on .user_schema_name and surfaced as an unhandled 500.
     if not datasource:
         return {
             'message': 'Datasource ({}) not found'.format(datasource_name)
         }, 404
     user_schema_name = datasource.user_schema_name
     user_table_name = datasource.user_table_name
     dataset = File.read_json(predicted_data)
     try:
         DatasourceModel.append_datasource(dataset, user_schema_name,
                                           user_table_name)
     except Exception:
         return {'message': ERROR_CANNOT_INSERT_DB}, 500
     return {
         'message':
         'This Dataset has append to datasource ({})'.format(
             datasource_name)
     }, 200