def getDatasetObjectsPrimary(dataset_id):
    """Return the rows of a dataset that the session user may view.

    Streams "<dataset_id>.csv" from S3 and returns up to the first 1000
    rows; for larger datasets the remainder is stashed in DatasetCache
    under a fresh cacheId so follow-up pages can be served.

    Responds 403 when the dataset is neither public nor authored by the
    requesting user.
    """
    user = AuthenticationService.verifySessionAndReturnUser(
        request.cookies["SID"])
    try:
        # MongoEngine .get() raises DoesNotExist rather than returning
        # None, so the access check must be exception-based — the
        # original `!= None` comparison could never be False.
        Dataset.objects.get(
            Q(id=dataset_id) & (Q(public=True) | Q(author=user)))
    except DoesNotExist:
        return Response("You do not have access to that dataset.",
                        status=403)

    filename = dataset_id + ".csv"
    fileFromS3 = s3.get_object(Bucket="agriworks-user-datasets",
                               Key=filename)
    # dtype=str keeps every column textual so mixed columns round-trip.
    dataset = pd.read_csv(fileFromS3["Body"], dtype=str)

    if len(dataset) <= 1000:
        return Response({
            "datasetObjects": DatasetService.buildDatasetObjectsList(dataset)
        })

    # Cache the tail so later pages don't re-read the file from S3.
    cacheId = str(uuid4())
    DatasetCache[cacheId] = dataset[1000:]
    return Response({
        "datasetObjects":
            DatasetService.buildDatasetObjectsList(dataset[:1000]),
        "cacheId": cacheId
    })
def get(pageNumber):
    """Return one page of dataset-info objects visible to the session user.

    Page "0" holds the first 16 datasets; every later page holds 12.
    "all" returns everything. Responds 400 when the page is empty or the
    page number is not an integer.
    """
    user = AuthenticationService.verifySessionAndReturnUser(
        request.cookies["SID"])
    allDatasets = Dataset.objects.filter(
        Q(public=True) | Q(author=user)).order_by('-dateCreated')

    if pageNumber == "all":
        datasets = allDatasets
    elif pageNumber == "0":
        datasets = allDatasets[:16]
    else:
        try:
            # First page is 16 wide, subsequent pages 12 wide.
            datasetIndex = 16 + 12 * (int(pageNumber) - 1)
        except ValueError:
            # Previously a non-numeric page number raised uncaught (500).
            return Response("No datasets matching the query were found",
                            status=400)
        datasets = allDatasets[datasetIndex:datasetIndex + 12]

    if len(datasets) == 0:
        return Response("No datasets matching the query were found",
                        status=400)
    return Response([DatasetService.createDatasetInfoObject(dataset)
                     for dataset in datasets])
def createView(self, request):
    """Create and persist an AgriWatchView owned by the session user.

    Reads visualType/xData/yData and the dataset id from the form.
    Returns the saved view; an error dict for a bad session or dataset
    id; or None when model validation fails.
    """
    try:
        # Establish view ownership from the session cookie.
        user = AuthenticationService.verifySessionAndReturnUser(
            request.cookies["SID"])
        if not user:
            return {"message": "Invalid session", "status": 400}

        # .get() raises DoesNotExist instead of returning None, so the
        # original `if (not dataset)` branch was unreachable — handle the
        # missing-dataset case via the except clause below.
        datasetId = request.form.get("dataset")
        dataset = Dataset.objects.get(id=datasetId)

        view = AgriWatchView(author=user,
                             dataset=dataset,
                             visualType=request.form.get("visualType"),
                             xData=request.form.get("xData"),
                             yData=request.form.get("yData"))
        view.save()
        return view
    except DoesNotExist:
        return {"message": "Invalid dataset ID", "status": 400}
    except ValidationError as e:
        print(e)
        return None
def getUsersDatasets():
    """Return info objects for every dataset authored by the session user,
    newest first. Responds 400 when the user has no datasets."""
    user = AuthenticationService.verifySessionAndReturnUser(
        request.cookies["SID"])
    datasets = Dataset.objects.filter(author=user).order_by('-dateCreated')
    # A queryset never yields None items, so the original per-item
    # `dataset == None` check could not fire; test emptiness up front
    # instead (matching the 400-on-empty behavior of the paged endpoint).
    if not datasets:
        return Response("No datasets found", status=400)
    return Response([DatasetService.createDatasetInfoObject(dataset)
                     for dataset in datasets])
def decorator(*args, **kwargs):
    """Gate the wrapped handler behind a valid session.

    Responds 403 when the SID cookie is missing or the session lookup
    raises; otherwise forwards to the wrapped function unchanged.
    """
    if "SID" not in request.cookies:
        return Response(status=403)
    cookie = request.cookies["SID"]
    try:
        # Verification raises on an invalid session; the returned user
        # object is not needed here, so it is not bound.
        AuthenticationService.verifySessionAndReturnUser(cookie)
        return f(*args, **kwargs)
    except Exception as e:
        print(e)
        return Response("Invalid session ID", status=403)
def get(self):
    """Return view objects for every AgriWatchView authored by the
    session user, newest first. Responds 400 when there are none."""
    user = AuthenticationService.verifySessionAndReturnUser(
        request.cookies["SID"])
    views = AgriWatchView.objects.filter(
        author=user).order_by('-dateCreated')
    # A queryset never yields None items, so the original per-item
    # `view == None` check could not fire; test emptiness up front.
    if not views:
        return Response("No views found", status=400)
    return Response([AgriWatchViewService.makeViewObject(view)
                     for view in views])
def verifySession():
    """Report whether the posted sessionId still maps to a live session.

    200 when valid; 401 when expired or unknown; 400 when malformed.
    """
    try:
        sessionId = request.form["sessionId"]
        user = AuthenticationService.verifySessionAndReturnUser(sessionId)
        if user:
            return Response(status=200)
        return Response("Your session has expired. Please login again.",
                        status=401)
    except DoesNotExist:
        return Response("Your session was not found. Please login again.",
                        status=401)
    except ValueError:
        return Response("Invalid session. Please login again.", status=400)
def new():
    """Return info objects for the session user's five most recently
    created datasets. Responds 404 when the user has none."""
    try:
        user = AuthenticationService.verifySessionAndReturnUser(
            request.cookies["SID"])
        # Five newest datasets owned by the user, most recent first.
        newDatasets = Dataset.objects(
            author=user).order_by("-dateCreated")[:5]
        # A queryset never yields None items, so the original per-item
        # `dataset == None` check could not fire — the intended 404 for
        # "no datasets" is restored by checking emptiness up front.
        if not newDatasets:
            return Response("No datasets found", status=404)
        return Response([DatasetService.createDatasetInfoObject(dataset)
                         for dataset in newDatasets])
    except Exception as e:
        print(e)
        return Response("Couldn't retrieve recent datasets", status=400)
def popular():
    """Return info objects for the five most-viewed datasets visible to
    the session user (their own plus public ones)."""
    try:
        user = AuthenticationService.verifySessionAndReturnUser(
            request.cookies["SID"])
        # Highest view counts first.
        datasets = Dataset.objects.filter(
            Q(author=user) | Q(public=True)).order_by("-views")[:5]
        # A queryset never yields None items, so the original per-item
        # `dataset == None` check could not fire; test emptiness instead.
        if not datasets:
            return Response("No datasets found", status=400)
        return Response([DatasetService.createDatasetInfoObject(dataset)
                         for dataset in datasets])
    except Exception as e:
        # Was a bare `except:` that swallowed everything silently.
        print(e)
        return Response("Couldn't retrieve popular datasets", status=400)
def get(self, searchQuery):
    """Full-text search over datasets, scoped by the referring page.

    Requests coming from /manage search only the user's own datasets
    (type "user"); requests from /browse search every dataset visible to
    the user (type "all"). Responds 400 for a blank query, an unknown
    referrer, or any search failure.
    """
    user = AuthenticationService.verifySessionAndReturnUser(
        request.cookies["SID"])
    try:
        # Read the header inside the try: a missing "referer" previously
        # raised KeyError outside it and surfaced as a 500.
        referrerURL = request.headers["referer"].split('/')[-1]

        if searchQuery == "" or searchQuery == " ":
            # Explicit rejection; the original used a bare `raise` with
            # no active exception, relying on the resulting RuntimeError
            # reaching the blanket handler below.
            raise ValueError("empty search query")

        if referrerURL == "manage":
            # Search restricted to datasets the user owns.
            scope = Dataset.objects.filter(author=user)
            resultType = "user"
        elif referrerURL == "browse":
            # Search across everything the user is allowed to see.
            scope = Dataset.objects.filter(Q(author=user) | Q(public=True))
            resultType = "all"
        else:
            # Invalid referrer URL.
            return Response(
                "Error processing search request. Please try again later.",
                status=400)

        matchedDatasets = scope.search_text(
            searchQuery).order_by('$text_score')
        datasets = [DatasetService.createDatasetInfoObject(dataset)
                    for dataset in matchedDatasets]
        return Response({"datasets": datasets, "type": resultType})
    except Exception:
        return Response(
            "Unable to retrieve datasets with the given search parameter.",
            status=400)
def getDataset(datasetId):
    """Return dataset info (with headers) and bump its view count.

    Responds 400 when the dataset cannot be found and 403 when it is
    private and not authored by the session user.
    """
    user = AuthenticationService.verifySessionAndReturnUser(
        request.cookies["SID"])
    try:
        # .get() raises DoesNotExist rather than returning None, so the
        # original `dataset == None` check was unreachable and a missing
        # id surfaced as an unhandled 500.
        dataset = Dataset.objects.get(id=datasetId)
    except DoesNotExist:
        return Response(
            "Unable to retrieve dataset information. Please try again later.",
            status=400)

    if not dataset.public and dataset.author != user:
        return Response("You do not have permission to access that dataset.",
                        status=403)

    # Atomic server-side increment avoids racing concurrent viewers.
    Dataset.objects(id=datasetId).update_one(inc__views=1)
    AuthenticationService.updateRecentDatasets(request.cookies["SID"],
                                               datasetId)
    return Response(
        DatasetService.createDatasetInfoObject(dataset, withHeaders=True))
def recent():
    """Return info objects for the session user's five most recently
    viewed datasets, resolved from the ids stored on the user."""
    try:
        # The user document carries the recently-viewed dataset ids.
        user = AuthenticationService.verifySessionAndReturnUser(
            request.cookies["SID"])
        retList = []
        for datasetId in user.recentDatasets[:5]:
            try:
                retList.append(
                    DatasetService.createDatasetInfoObject(
                        Dataset.objects.get(id=datasetId)))
            except Exception:
                # Best-effort: skip ids whose dataset no longer exists.
                continue
        return Response(retList)
    except Exception as e:
        print(e)
        return Response("Couldn't retrieve recent datasets", status=400)
def deleteDataset(datasetId):
    """Delete a dataset (its S3 object and its DB document).

    Responds 400 when the dataset cannot be found, 403 when the session
    user is not its author, and 500 when deletion itself fails.
    """
    user = AuthenticationService.verifySessionAndReturnUser(
        request.cookies["SID"])
    try:
        # .get() raises DoesNotExist rather than returning None, so the
        # original `dataset == None` check was unreachable.
        dataset = Dataset.objects.get(id=datasetId)
    except DoesNotExist:
        return Response(
            "Unable to retrieve dataset information. Please try again later.",
            status=400)

    if dataset.author != user:
        return Response("You do not have permission to delete that dataset.",
                        status=403)
    try:
        # Remove the backing CSV first, then the metadata document.
        s3.delete_object(Bucket="agriworks-user-datasets",
                         Key=datasetId + ".csv")
        dataset.delete()
        # Fixed typo in the user-facing message ("Succesfully").
        return Response("Successfully deleted dataset.", status=200)
    except Exception:
        return Response("Unable to delete dataset.", status=500)
def createDataset(self, request, uploadTime):
    """Validate an uploaded CSV, persist it as a Dataset, and notify the user.

    Saves any new tags, stores the file in S3 under "<dataset.id>.csv",
    and emails the uploader a completion notice. Returns the saved
    Dataset, an error dict for a bad session, or None when validation
    fails or the CSV contains missing values.
    """
    try:
        user = AuthenticationService.verifySessionAndReturnUser(
            request.cookies["SID"])
        if not user:
            return {"message": "Invalid session", "status": 400}

        # TODO: verify these form fields exist before use — a missing
        # "tags" field would make .split() raise AttributeError here.
        uploadedFile = request.files['file']
        dataSetName = request.form.get("name")
        dataSetIsPublic = request.form.get("permissions") == "Public"
        dataSetTags = request.form.get("tags").split(',')
        dataSetType = request.form.get("type")
        # An empty "tags" field splits to [""] — treat that as no tags.
        if dataSetTags == [""]:
            dataSetTags.pop()

        data = pd.read_csv(uploadedFile)
        keys = list(data.columns)
        if data.isnull().values.sum() > 0:
            # Reject datasets with missing values; handled below. The
            # original raised a bare ValueError that was NOT caught by
            # `except ValidationError` and escaped the function.
            raise ValueError("uploaded CSV contains missing values")

        # Register any unseen tags in the tag collection.
        for tag in dataSetTags:
            newTag = Tag(name=tag, datasetType=dataSetType)
            newTag.validate()
            if not self.tagExist(newTag):
                newTag.save()

        # Create and save the dataset document.
        dataset = Dataset(name=dataSetName, author=user, keys=keys,
                          public=dataSetIsPublic, tags=dataSetTags,
                          datasetType=dataSetType, views=1)
        dataset.save()

        # Rewind so S3 receives the file from the beginning.
        uploadedFile.seek(0)
        self.uploadToAWS(dataset.id, uploadedFile)

        # Notify the uploader that processing finished.
        uploadCompletedDate = str(datetime.datetime.now()).split(".")[0]
        headline = f"Your <b>{dataset.name}</b> dataset has finished processing. <br> <br> "
        uploadString = f"<b>Upload Received</b>: {uploadTime} <br> <br> <b>Upload Completed</b>: {uploadCompletedDate}<br> <br>"
        datasetLink = f"<b> Link below to view your dataset: </b> <br> <a href ='{app.rootUrl}/dataset/{dataset.id}'>{app.rootUrl}/dataset/{dataset.id}</a>."
        formattedMessage = headline + uploadString + datasetLink
        MailService.sendMessage(user, "Dataset successfully uploaded",
                                formattedMessage)
        return dataset
    except (ValidationError, ValueError) as e:
        print(e)
        return None