def updateRED(self, action, id, **kwargs):
    '''Update a whitelisted subset of properties on an existing export job.

    Only ``cron_schedule``, ``parallel_searches`` and ``compress_level``
    may be changed; all other kwargs are silently ignored. Ownership is
    forced to 'nobody' so the job stays app-level.

    Returns JSON ``{success: True}`` on success, JSON with the model's
    error list and a 500 status when the save fails, and raises a 404
    HTTPError when the job cannot be loaded.
    '''
    user = cherrypy.session['user']['name']
    app = cherrypy.request.path_info.split('/')[3]
    try:
        exportJob = HDFSExport.get(id)
    # BUG FIX: the original bare `except:` wrapped the whole body, so any
    # failure after the job was loaded (including the save-error path)
    # was misreported as "Export Job not found". Only the lookup is
    # guarded now.
    except Exception:
        raise cherrypy.HTTPError(status=404, message="Export Job not found")
    allowed_props = ('cron_schedule', 'parallel_searches', 'compress_level')
    update_props = dict()
    for k, v in kwargs.items():
        if k in allowed_props:
            update_props[k] = v
    if len(update_props):
        exportJob.update(update_props)
    # force app-level ownership — presumably so scheduled jobs are not
    # tied to the submitting user; TODO confirm against model semantics
    if exportJob.entity and exportJob.entity.owner:
        exportJob.entity.owner = 'nobody'
    if not exportJob.passive_save():
        cherrypy.response.status = 500
        return self.render_json(
            dict(success=False,
                 errors=[str(err) for err in exportJob.errors]))
    return self.render_json(dict(success=True))
def submit(self, action, **kwargs):
    '''Accept data to setup the Splunk HDFS app'''
    app = cherrypy.request.path_info.split('/')[3]
    user = cherrypy.session['user']['name']
    # remove the csrf protection...(perhaps this should be done for us?)
    del kwargs['splunk_form_key']
    # TODO make this a lib function or static method on the model and
    # replace this and the one in the defaultExports.saveJob
    partitions = []
    # BUG FIX: snapshot the keys before mutating kwargs — deleting from a
    # dict while iterating its live keys() view raises RuntimeError on
    # Python 3.
    for k in list(kwargs.keys()):
        if k.startswith('partition_'):  # and splunk.util.normalizeBoolean(kwargs.get(k, 'f')):
            partitions.append(k[len('partition_'):])
            del kwargs[k]
    kwargs['partition_fields'] = ','.join(
        partitions) if len(partitions) > 0 else 'None'
    # placeholder search value; the default export's search is not meant
    # to be persisted from this form
    kwargs['search'] = 'This should not get saved'
    defaultExport = HDFSExport(app, user, 'default', **kwargs)
    defaultExport.metadata.sharing = 'app'
    defaultExport.metadata.owner = 'nobody'
    defaultExport.metadata.app = app
    logger.info("Submitted setup form with params: %s" % kwargs)
    if defaultExport.passive_save():
        if app_util.is_xhr():
            cherrypy.response.status = 200
            return ""
        raise cherrypy.HTTPRedirect(
            self.make_url(['app', app, 'config_clusters']), 303)
    # save failed: signal 404 to AJAX callers, then re-render the form
    # with the model (and its validation errors) for a full page load
    if app_util.is_xhr():
        cherrypy.response.status = 404
    return self.render_template(
        '/%s:/templates/export_defaults.html' % app,
        dict(form_content='fomasdafe', app=app, defaultExport=defaultExport))
def details(self, action, id=None, **kwargs):
    '''show the details of the export job'''
    app = cherrypy.request.path_info.split('/')[3]
    # BUG FIX: exportJob was only bound inside the try, so a failed
    # lookup left it undefined and the render below raised NameError.
    exportJob = None
    try:
        exportJob = HDFSExport.get(id)
    except Exception:
        logger.warn('Could not find export job: %s' % id)
        # TODO: return something meaningful here
    return self.render_template('/%s:/templates/details.html' % app,
                                dict(app=app, id=id, exportJob=exportJob))
def saveRED(self, user, app, **kwargs):
    '''Save the scheduled HDFS export, return a tuple of the success and the modal'''
    partitions = []
    # BUG FIX: snapshot the keys before mutating kwargs — deleting from a
    # dict while iterating its live keys() view raises RuntimeError on
    # Python 3. (Same duplicated partition-parsing as submit(); see the
    # TODO there about hoisting it onto the model.)
    for k in list(kwargs.keys()):
        if k.startswith('partition_'):  # and splunk.util.normalizeBoolean(kwargs.get(k, 'f')):
            partitions.append(k[len('partition_'):])
            del kwargs[k]
    kwargs['partition_fields'] = ','.join(
        partitions) if len(partitions) > 0 else 'None'
    exportJob = HDFSExport(app, user, **kwargs)
    exportJob.metadata.sharing = 'app'
    exportJob.metadata.owner = user
    exportJob.metadata.app = app
    logger.info("ExportJob errors: %s" % exportJob.errors)
    return (exportJob.passive_save(), exportJob)
def edit(self, action, id=None, **kwargs):
    '''Render the edit form for a single export job.'''
    current_app = cherrypy.request.path_info.split('/')[3]
    user = cherrypy.session['user']['name']
    job = HDFSExport.get(id)
    template_args = dict(
        app=current_app,
        search=job.search,
        exportJob=job,
        clusters=Cluster.all().filter_by_app(current_app),
    )
    return self.render_template(
        '/%s:/templates/edit_export_job.html' % current_app, template_args)
def renderList(self, exportJob=None, listErrors=None, **kwargs):
    '''Render the list page of HDFS export jobs for the current app.'''
    current_app = cherrypy.request.path_info.split('/')[3]
    current_user = cherrypy.session['user']['name']
    # fall back to a fresh blank export model when none was supplied
    if not exportJob:
        exportJob = self.getNewHDFSExport(current_app, current_user)
    template_args = dict(
        form_content='form_content',
        app=current_app,
        exportJobs=HDFSExport.all().filter_by_app(current_app),
        exportJob=exportJob,
        listErrors=listErrors,
        clusters=Cluster.all().filter_by_app(current_app),
    )
    return self.render_template(
        '/%s:/templates/list_hdfs_jobs.html' % current_app, template_args)
def custom_action(self, action, id):
    '''Invoke the named action on an export job and report JSON success.'''
    app = cherrypy.request.path_info.split('/')[3]
    succeeded = False
    try:
        job = HDFSExport.get(id)
        succeeded = getattr(job, action)()
    # do we need generic exception here?
    except Exception as e:
        logger.warn('Could not %s export job: %s, %s' % (action, id, str(e)))
    if not succeeded:
        return self.render_json(dict(
            success=False,
            error='Could not %s export job: %s' % (action, id)))
    return self.render_json(dict(success=True))
def getNewHDFSExport(self, app, user):
    '''Return a blank HDFSExport model.

    Workaround: external REST handlers do not support _new, so load the
    special _new_ext entity and clear its name instead.
    '''
    blank = HDFSExport.get(HDFSExport.build_id('_new_ext', app, user))
    blank.name = ''
    return blank