def postFile(request, pdsKey, index='0'):
    """
    Handler to upload data. Validates the key and index, and dumps the raw
    content of the request to a file.
    """
    # pylint: disable = E1101
    # Pylint does not notice Django model attributes.
    user = request.user

    # Ensure the table index is numeric
    if not index.isdigit():
        return jsonResponse({
            'status': 'error',
            'error': 'Invalid table index: ' + index
        })

    # Ensure the key is valid to prevent a malicious user from
    # trying to write to 'uploadedData/raw/../../foo'
    pds = PendingDataSource.objects.get(key=str(pdsKey), user=user)

    if not os.path.exists('uploadedData/raw/'):
        os.makedirs('uploadedData/raw/')

    with open("uploadedData/raw/%s-%s" % (pds.key, index), 'w') as f:
        f.write(request.body)

    return jsonResponse({'status': 'success'})

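# NOTE: the handlers in this module call a project-local jsonResponse() helper
# that is defined elsewhere. The sketch below is only an assumption about its
# shape, inferred from how it is used here (a payload plus an optional
# `status`); the project's real helper may add custom encoders or headers.
import json
from django.http import HttpResponse

def jsonResponse(data, status=200):
    """Serialize `data` to JSON and wrap it in an HTTP response (sketch)."""
    return HttpResponse(json.dumps(data),
                        status=status,
                        content_type='application/json')
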
def dsCreate(request):
    """View Handler for creating data sources."""
    clientDsObj = json.loads(request.body)

    try:
        result = createDataSource(request, clientDsObj)
        if result['status'] == 'success':
            return jsonResponse({'key': result['key']})
        elif result['status'] == 'error':
            return jsonResponse(result['error'], status=400)
    except RedirectRequired as red:
        return jsonResponse({'redirect': red.url})

def tutorialComplete(request):
    """Django handler for completion of the tutorial."""
    body = json.loads(request.body)
    TutorialCompletion(user=request.user, type=body['type']).save()
    return jsonResponse({})

def dsPendingDelete(request, pendingDsKey):
    """View Handler for deleting pending data sources."""
    # user may be null, this is acceptable
    pds = PendingDataSource.objects.get(key=str(pendingDsKey), user=request.user)
    pds.delete()
    return jsonResponse({'status': 'success'})

def dsPendingCreate(request):
    """View Handler for creating pending data sources."""
    dataSourceParamsJson = json.loads(request.body)
    pendingDs = PendingDataSource(params_json=dataSourceParamsJson)
    pendingDs.save()
    return jsonResponse({'key': pendingDs.key})

def dashCreate(request):
    """View Handler for creating dashboards."""
    clientDbObj = json.loads(request.body)
    dash = Dashboard(name=str(clientDbObj['name']),
                     spec_json=json.dumps(clientDbObj['spec']),
                     user=request.user)
    dash.save()
    createTableRefs(dash, list(clientDbObj['dataCollection']))
    return jsonResponse({'key': dash.key})

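# createTableRefs() and deleteTableRefs() are project helpers defined
# elsewhere. Based on dsDelete() below, which filters DashboardDataTable by
# data_source and walks r.dashboard, a plausible sketch is given here; the
# item fields ('dataSourceKey', 'tableName') and the table_name column are
# assumptions, not the project's confirmed schema.

def createTableRefs(dash, dataCollection):
    """Record which data source tables a dashboard references (sketch)."""
    for item in dataCollection:
        ds = DataSource.objects.get(key=str(item['dataSourceKey']),  # assumed field
                                    user=dash.user)
        DashboardDataTable.objects.create(dashboard=dash,
                                          data_source=ds,
                                          table_name=item.get('tableName'))  # assumed field

def deleteTableRefs(dash):
    """Remove all table references attached to a dashboard (sketch)."""
    DashboardDataTable.objects.filter(dashboard=dash).delete()
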
def previewCsv(request, key, index='0'):
    """
    Handler to get a preview of an uploaded file. This preview is shown to the
    user using SlickGrid in order to verify that the file is being parsed
    correctly.
    """
    # pylint: disable = E1101
    # Pylint does not notice Django model attributes.
    user = request.user
    data = json.loads(request.body)

    # Ensure the table index is numeric
    if not index.isdigit():
        return jsonResponse({
            'status': 'error',
            'error': 'Invalid table index: ' + index
        })

    pds = PendingDataSource.objects.get(key=str(key), user=user)

    with open("uploadedData/raw/%s-%s" % (pds.key, index), 'rU') as f:
        parsed = parseForPreview(f, data)

    return jsonResponse(parsed)

def tableQuery(request, _):
    """View Handler for performing a query on a table."""
    spec = json.loads(urllib.unquote(request.POST.get('spec', None)))
    assert spec, "Invalid query specification!"

    tableName, dsKey = getDsInfo(spec)[0]
    limit = int(request.POST.get('limit', 1000))

    try:
        connection = getConnection(request, dsKey)
        result = runConnectionMethod(connection, 'queryTable', tableName, spec, limit)
    except ValueError as err:
        if len(err.args) > 0:
            return jsonResponse({'message': str(err.args[0])}, status=500)
        return jsonResponse({'message': None}, status=500)

    return jsonResponse(result)  # saneEncode already applied in queryTable

def tableMeta(request, dsKey):
    """View Handler for returning table metadata."""
    tableName = request.POST.get('tableName', None)
    columnExpr = json.loads(urllib.unquote(request.POST.get('columnExpr', None)))
    dataType = request.POST.get('type', None)

    if None in [tableName, columnExpr, dataType]:
        raise ValueError("views.dataSource.tableMeta",
                         "Invalid request; missing data.")

    try:
        connection = getConnection(request, dsKey)
        result = runConnectionMethod(connection, 'getColumnMetadata',
                                     tableName, columnExpr, dataType)
    except ValueError as err:
        if len(err.args) > 0:
            return jsonResponse({'message': str(err.args[0])}, status=500)
        return jsonResponse({'message': None}, status=500)

    return jsonResponse(saneEncode(result))

def dsList(request):
    """View Handler to list data sources."""
    # pylint: disable = E1101
    # Pylint does not recognize Django models as having 'objects' attribute.
    dataSources = DataSource.objects.filter(user=request.user)
    result = [ { 'key': ds.key
               , 'name': ds.name
               , 'type': ds.type
               } for ds in dataSources]
    return jsonResponse({'dataSources': result})

def sshFileExists(request):
    """View Handler to check that a file or socket exists on a remote host over SSH."""
    clientDsObj = json.loads(request.body)
    username = clientDsObj['username']
    host = clientDsObj['host']
    port = int(clientDsObj['port'])
    privateKey = clientDsObj['privateKey']
    filePath = clientDsObj['filePath']
    socket = bool(clientDsObj.get('isSocket', False))

    if not validate.linuxUsername(username):
        return jsonResponse({'status': 'connFailed', 'invalidField': 'username'})
    if not validate.hostname(host):
        return jsonResponse({'status': 'connFailed', 'invalidField': 'host'})
    if not validate.filepath(filePath):
        return jsonResponse({'status': 'connFailed', 'invalidField': 'filePath'})

    # 0600 permissions required by ssh
    keyPath = getNewTempFilePath()
    with os.fdopen(os.open(keyPath, os.O_WRONLY | os.O_CREAT, 0600), 'w') as f:
        f.write(privateKey)

    def runRemoteCmd(cmd):
        return subprocess.call(
            [ 'ssh'
            , '-2'
            , '-o', 'StrictHostKeyChecking=no'
            , '-o', 'PasswordAuthentication=no'
            , '-p', str(port)
            , '-i', keyPath
            , '%s@%s' % (username, host)
            , cmd
            ])

    if runRemoteCmd('echo -n') != 0:
        return jsonResponse({'status': 'connFailed'})

    if socket:
        fileTypeTest = 'S'
    else:
        fileTypeTest = 'f'

    if runRemoteCmd('test -%s "%s"' % (fileTypeTest, filePath)) != 0:
        return jsonResponse({'status': 'notFound'})
    else:
        return jsonResponse({'status': 'found'})

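# Example of the JSON body sshFileExists() expects, derived from the fields it
# reads above. All values are illustrative placeholders, not real credentials:
#
#   {
#       "username": "deploy",
#       "host": "db.example.com",
#       "port": 22,
#       "privateKey": "-----BEGIN RSA PRIVATE KEY-----\n...",
#       "filePath": "/var/run/mysqld/mysqld.sock",
#       "isSocket": true
#   }
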
def sshKeygen(request):
    """View Handler to generate a new RSA key pair using ssh-keygen."""
    keyFilePath = getNewTempFilePath()
    subprocess.check_output(
        [ 'ssh-keygen'
        , '-q'
        , '-t', 'rsa'
        , '-f', keyFilePath
        , '-N', ''
        , '-C', 'polychart'
        ])

    with open(keyFilePath) as f:
        privateKey = f.read()
    with open(keyFilePath + '.pub') as f:
        publicKey = f.read()

    deleteOnExit(keyFilePath + '.pub')

    return jsonResponse({'privateKey': privateKey, 'publicKey': publicKey})

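# getNewTempFilePath() and deleteOnExit() are project helpers not shown in
# this module. The sketch below captures only the behaviour the code above
# relies on (a fresh temp path, and deferred cleanup); the 'polychart-' prefix
# and the cleanup-at-creation registration are assumptions.
import atexit
import os
import tempfile
import uuid

def getNewTempFilePath():
    """Return a unique path under the system temp dir; the file is not created (sketch)."""
    path = os.path.join(tempfile.gettempdir(), 'polychart-' + uuid.uuid4().hex)
    deleteOnExit(path)
    return path

def deleteOnExit(path):
    """Register `path` for removal when the process exits (sketch)."""
    atexit.register(lambda: os.path.exists(path) and os.remove(path))
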
def dsDelete(request, dsKey):
    """View Handler to delete data sources."""
    # pylint: disable = E1101
    # Pylint does not recognize Django models as having 'objects' attribute.
    ds = DataSource.objects.get(key=str(dsKey), user=request.user)

    # Avoid leaving behind dashboards that reference the deleted data source
    refs = DashboardDataTable.objects.filter(data_source=ds)
    dashs = []
    for r in refs:
        if r.dashboard not in dashs:
            dashs.append(r.dashboard)
    refs.delete()
    for dash in dashs:
        dash.delete()

    ds.delete()
    return jsonResponse({})

def getKey(request):
    """
    Provides a key to upload to. This structure will be useful if/when we
    enable chunking of uploads.

    If data contains a key already, we're uploading a new version of the
    dataset. This functionality is unused for the moment.
    """
    # pylint: disable = E1101
    # Pylint does not notice Django model attributes.
    user = request.user
    data = json.loads(request.body)

    pds = PendingDataSource.objects.create(user=user, params_json=data)

    if 'key' in data:
        try:
            lds = LocalDataSource.objects.get(user=user, datasource__key=data['key'])
            lds.pendingdatasource = pds
            lds.save()
        except LocalDataSource.DoesNotExist:
            pass

    pds.params_json['key'] = pds.key
    pds.save()

    return jsonResponse({'key': pds.key})

def dashUpdate(request, dbKey):
    """View Handler for updating an existing dashboard."""
    redirectUrl = None
    clientDashObj = {}

    contentType = request.META.get('CONTENT_TYPE')
    if contentType and contentType.startswith("multipart"):
        clientDashObj = json.loads(request.POST['data'][0])
        redirectUrl = request.POST['redirect'][0]
    else:
        clientDashObj = json.loads(request.body)

    dash = Dashboard.objects.get(key=str(dbKey), user=request.user)
    if 'name' in clientDashObj:
        dash.name = str(clientDashObj['name'])
    dash.spec_json = json.dumps(clientDashObj['spec'])
    dash.save()

    deleteTableRefs(dash)
    createTableRefs(dash, list(clientDashObj['dataCollection']))

    if redirectUrl:
        return redirect(redirectUrl)
    else:
        return jsonResponse({})

def cleanCsv(request, key):
    """
    Handler to clean uploaded CSV data. Unlike previewCsv, this function
    expects a parsing config for all of the tables. It parses each in turn,
    and dumps the whole list into the database in JSON format.
    """
    # pylint: disable = E1101
    # Pylint does not notice Django model attributes.
    user = request.user
    data = json.loads(request.body)

    pds = PendingDataSource.objects.get(key=str(key), user=user)
    (lds, _) = LocalDataSource.objects.get_or_create(pendingdatasource=pds)

    lds.json = []
    for index, tableFormat in enumerate(data['tables']):
        with open("uploadedData/raw/%s-%s" % (pds.key, index), 'rU') as f:
            lds.json.append(parseForPolychart(f, tableFormat))
    lds.save()

    return jsonResponse({'status': 'success'})

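# For illustration only: the upload flow implemented by getKey(), postFile(),
# previewCsv() and cleanCsv(), seen from a client. The URL paths and the
# parsing-config fields ('delimiter', 'name') are hypothetical placeholders;
# the URL conf and parser options are not part of this module. Only the
# request/response shapes follow the handlers above.
import json
import requests  # assumes the `requests` package, for illustration only

def uploadCsvExample(session, baseUrl, rawCsv):
    # 1. Reserve an upload key for a new pending data source.
    key = session.post(baseUrl + '/data/upload/key',               # hypothetical path
                       data=json.dumps({'name': 'My CSV'})).json()['key']
    # 2. Post the raw file contents against that key (table index 0).
    session.post('%s/data/upload/%s/0' % (baseUrl, key), data=rawCsv)
    # 3. Ask the server to parse a preview for SlickGrid.
    preview = session.post('%s/data/preview/%s/0' % (baseUrl, key),
                           data=json.dumps({'delimiter': ','})).json()
    # 4. Confirm the parsing config for every table and finalize.
    session.post('%s/data/clean/%s' % (baseUrl, key),
                 data=json.dumps({'tables': [{'delimiter': ','}]}))
    return preview
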
def dashExportCode(request):
    """
    Handler function to produce a session code so that the user may be
    redirected to a unique URL for the export task to take place.

    Args:
        request: Django request object; should contain the serialized
            dashboard specification in the request body.

    Returns:
        A unique code corresponding to the key used to store the POST data in
        the request session object.
    """
    if not settings.EXPORT_SERVICE_PORT:
        raise ValueError('Received an export request, but exporting is not enabled')

    exportRequest = json.loads(request.body)
    serial = exportRequest['serial']
    exportType = exportRequest['exportType']

    code = randomCode()
    request.session[code] = {'serial': serial, 'exportType': exportType}

    return jsonResponse({'code': code})

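# randomCode() is defined elsewhere in the project; dashExportCode() only
# needs it to yield a hard-to-guess session key. A minimal sketch under that
# assumption (the default length is also an assumption):
import random
import string

def randomCode(length=16):
    """Return a random alphanumeric code (sketch)."""
    rng = random.SystemRandom()
    return ''.join(rng.choice(string.ascii_letters + string.digits)
                   for _ in range(length))
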
def dashDelete(request, dbKey):
    """View Handler for deleting a dashboard."""
    dash = Dashboard.objects.get(key=str(dbKey), user=request.user)
    deleteTableRefs(dash)
    dash.delete()
    return jsonResponse({})

def dashList(request):
    """View Handler for listing the current user's dashboards."""
    dashboards = Dashboard.objects.filter(user=request.user)
    result = [{'key': dash.key, 'name': dash.name} for dash in dashboards]
    return jsonResponse({'dashboards': result})

def tableList(request, dsKey):
    """View Handler for listing tables in a data source."""
    connection = getConnection(request, dsKey)
    tables = runConnectionMethod(connection, 'listTables')
    return jsonResponse(saneEncode(tables))

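# saneEncode() is another project helper, used by tableMeta() and tableList().
# Judging from its use right before jsonResponse(), it most likely coerces
# values that json.dumps cannot handle into serializable ones; the sketch
# below is an assumption to that effect, not the project's implementation.
import datetime
import decimal

def saneEncode(value):
    """Recursively coerce dates and decimals into JSON-friendly values (sketch)."""
    if isinstance(value, dict):
        return {k: saneEncode(v) for k, v in value.items()}
    if isinstance(value, (list, tuple)):
        return [saneEncode(v) for v in value]
    if isinstance(value, (datetime.date, datetime.datetime)):
        return value.isoformat()
    if isinstance(value, decimal.Decimal):
        return float(value)
    return value
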