def changeUsername():
    """Rename a user if the requested username is not already taken.

    Reads ``newUsername`` and ``id`` from the JSON request body.
    Returns the user's payload (via ``getUser``) merged with
    ``updatedUsername`` (the new name, or "" on conflict) and a
    ``success`` flag.
    """
    newUsername = request.json['newUsername']
    # renamed from `id` — don't shadow the builtin
    userId = request.json['id']
    # fetch the target user once; original looked it up in both branches
    user = GoogleUser.query.filter_by(id=userId).first()
    if GoogleUser.query.filter_by(username=newUsername).first() is not None:
        # username already taken — report failure without changing anything
        return {**getUser(user), 'updatedUsername': "", 'success': False}
    dbSession.query(GoogleUser).filter_by(id=userId).update({'username': newUsername})
    dbSession.commit()
    return {**getUser(user), 'updatedUsername': newUsername, 'success': True}
def approveData():
    """Record a user's review of a dataset row and bump its approval count.

    Reads ``userId``, ``datasetId``, ``dataId`` and ``approvalStatus`` from
    the JSON request body. Always persists the ``DataReview``; when the
    review is an approval, increments the row's ``approvals`` counter.
    Returns ``{'success': True}`` on success.
    """
    userId = request.json['userId']
    datasetId = request.json['datasetId']
    dataId = request.json['dataId']
    approvalStatus = request.json['approvalStatus']
    dataReview = DataReview(datasetId=datasetId, userId=userId,
                            dataId=dataId, approvalStatus=approvalStatus)
    dataReview.saveToDB()
    if approvalStatus:
        data = DatasetData.query.filter_by(id=dataId).first()
        if data is None:
            # original crashed with AttributeError (HTTP 500) when the row
            # was missing; report failure explicitly instead
            return {'success': False}
        data.approvals += 1
        dbSession.commit()
    return {'success': True}
def updateDB():
    """Flush approved-but-unloaded rows into their dataset CSV blobs.

    For every ``DatasetData`` row with ``loaded=False`` and at least 4
    approvals: download the dataset CSV from Azure Blob Storage to a temp
    file, append the row's data, round-trip through pandas to normalise
    the CSV, re-upload via ``uploadFile``, delete the temp file, and mark
    the row loaded. Commits once at the end.
    """
    notUpdatedData = DatasetData.query.filter_by(loaded=False).all()
    blobServiceClient = BlobServiceClient.from_connection_string(
        os.getenv('AZURE_CONNECTION_STRING'))
    containerName = "datasets"
    for data in notUpdatedData:
        if data.approvals < 4:
            continue  # not enough reviewer approvals yet
        blobName = data.datasetId + data.fileType
        blobClient = blobServiceClient.get_blob_client(
            container=containerName, blob=blobName)
        downloadPath = TEMP_FOLDER + blobName
        # download the current dataset contents to a local temp file
        # (context managers replace the original's unclosed file handles)
        with open(downloadPath, "w+") as f:
            f.write(blobClient.download_blob().readall().decode("utf-8"))
        with open(downloadPath, newline='') as f:
            rowsToAppend = list(csv.reader(f))
        rowsToAppend.append(data.data)
        # rewrite the whole file in one pass; mode "w" replaces the
        # original truncate-then-append dance, and newline='' prevents
        # csv blank-line corruption on Windows
        with open(downloadPath, "w", newline='', encoding='utf-8') as f:
            csv.writer(f).writerows(rowsToAppend)
        # pandas round-trip normalises the CSV (original behaviour)
        df = pd.read_csv(downloadPath)
        df.to_csv(downloadPath, index=False)
        uploadFile(downloadPath)
        pathlib.Path(downloadPath).unlink()
        data.loaded = True
    dbSession.commit()
def saveToDB(self):
    """Persist this instance: stage it in the session, then commit."""
    dbSession.add(self)
    dbSession.commit()