def post(self, project, filename):
    try:
        self.set_header('Content-Type', 'application/json')
        installm = installpmodel(self.get_secure_cookie("crtcluster"))
        packegpathm = packagepathmodel(self.get_secure_cookie("crtcluster"))
        # read the install project
        chooseinstall = installm.installproject
        packetsp = packegpathm.packagepath
        # upload file
        if filename == "uploadfile":
            if self.request.files.get('uploadfile', None):
                uploadpath = packetsp[chooseinstall[-1]][project].split(',')[-1] + '/' + self.request.files['uploadfile'][0]['filename']
                uploadfile = self.request.files['uploadfile'][0]
                with open(uploadpath, 'w') as f:
                    f.write(uploadfile['body'])
            # send the user back to the configuration page after the upload
            self.redirect('/CustomConfigure')
        # save the xml file
        else:
            confpath = packetsp[chooseinstall[-1]][project].split(',')[-1] + '/' + filename.split('?')[0]
            if filename.split('?')[0].split('.')[-1] == 'xml':
                # XML config files are posted as a JSON property map
                data = json.loads(self.request.body)
                con = HadoopConf(confpath)
                con.setdt2(data)
            else:
                # any other file is saved verbatim from the request body
                with open(confpath, 'w') as f:
                    f.write(self.request.body)
            self.set_header("Content-Type", "application/text")
            self.set_status(201)
            self.write("Saved successfully!")
    except Exception as e:
        logging.error(e)
        self.set_header("Content-Type", "application/text")
        self.set_status(500)
        self.write(str(e))
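For reference, the handlers in this section index `installm.installproject` and `packegpathm.packagepath` and then take the last element of a comma-separated string as the configuration directory. Neither model is defined here, so the snippet below only illustrates the shape those structures appear to have, inferred from that usage; the concrete names and paths are hypothetical.

```python
# Hypothetical example data, inferred from how the handlers index these structures.
# installpmodel(...).installproject looks like an ordered list of package names;
# packagepathmodel(...).packagepath maps package -> project -> "archive,...,confdir".
installproject = ["hadoop-2.6.0"]                      # chooseinstall[-1] -> "hadoop-2.6.0"
packagepath = {
    "hadoop-2.6.0": {
        "hadoop": "/opt/pkg/hadoop-2.6.0.tar.gz,/opt/hadoop/etc/hadoop",
    },
}

# The handlers resolve the configuration directory like this:
confdir = packagepath[installproject[-1]]["hadoop"].split(',')[-1]
# -> "/opt/hadoop/etc/hadoop"; a file path is then confdir + '/' + filename
```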
def post(self, filename):
    try:
        paths = 'static/configure/' + filename.split('?')[0]
        # the request body carries the JSON property map (was missing in the original)
        data = json.loads(self.request.body)
        con = HadoopConf(paths)
        con.setdt2(data)
        self.set_header("Content-Type", "application/text")
        self.set_status(201)
        self.write("Saved successfully!")
    except Exception as e:
        logging.error(e)
        self.set_header("Content-Type", "application/text")
        self.set_status(500)
        self.write(str(e))
def get(self, projectname):
    # NOTE: this handler yields tornado.gen.Task, so it is expected to run under the
    # asynchronous/gen decorators applied where it is defined; every branch must finish the request.
    if self.get_secure_cookie("crtcluster") is None:
        self.set_header("Content-Type", "application/text")
        self.write("You should select a cluster first before performing this operation!")
        self.finish()
    else:
        # resolve the configuration paths for the selected cluster and project
        installm = installpmodel(self.get_secure_cookie("crtcluster"))
        packegpathm = packagepathmodel(self.get_secure_cookie("crtcluster"))
        chooseinstall = installm.installproject
        packetsp = packegpathm.packagepath
        yarnpath = packetsp[chooseinstall[-1]][projectname].split(',')[-1] + '/yarn-site.xml'
        hdfspath = packetsp[chooseinstall[-1]][projectname].split(',')[-1] + '/hdfs-site.xml'
        clusterpath = 'cluster/' + self.get_secure_cookie("crtcluster")
        rtvalue = {}
        resourcemanage = "yarn.resourcemanager.address"
        nameservices = "dfs.nameservices"
        defaultFS = "fs.defaultFS"
        if os.path.isfile(yarnpath) and os.path.isfile(hdfspath):
            yarncon = HadoopConf(yarnpath)
            hdfscon = HadoopConf(hdfspath)
            yarndt = yarncon.get()
            hdfsdt = hdfscon.get()
            if hdfsdt.get(nameservices, None) is not None:
                rtvalue['nameservices'] = hdfsdt.get(nameservices)
            else:
                # fall back to fs.defaultFS from core-site.xml when dfs.nameservices is not set
                rtvalue['nameservices'] = HadoopConf(packetsp[chooseinstall[-1]][projectname].split(',')[-1] + '/core-site.xml').get().get(defaultFS)
            rtvalue['resourcemanager'] = yarndt.get(resourcemanage, "")
            # list the nodes whose installation failed
            clien = tornado.httpclient.AsyncHTTPClient()
            clusterurl = cluster().getclusterurl(self.get_secure_cookie('crtcluster'))
            response = yield tornado.gen.Task(clien.fetch, clusterurl[0] + '/geterrorhost')
            errorhost = json.loads(response.body)
            if len(errorhost) != 0:
                if os.path.isfile(clusterpath + "/hosts"):
                    cmd = "grep -E '" + "|".join(errorhost) + "' " + clusterpath + "/hosts"
                    ips = runshcommand(cmd)
                    # ips format: ["ip\thost", ...]
                    if ips is not None:
                        rtvalue['hosts'] = [[x.split('\t')[0], x.split('\t')[1]] for x in ips]
                    else:
                        rtvalue['hosts'] = []
                else:
                    rtvalue['hosts'] = []
            self.set_header('Content-Type', 'application/json')
            self.write(json.dumps(rtvalue))
            self.finish()
        else:
            self.set_header('Content-Type', 'application/text')
            logging.error("path error:" + yarnpath + "," + hdfspath)
            self.write("error!")
            self.finish()
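The handler above relies on a project helper, `runshcommand`, that runs a shell command and returns its output lines (the comment in the handler says `["ip\thost", ...]`) or `None` when nothing matches. Its real implementation is not shown in this section; the sketch below is only a minimal stand-in consistent with how it is called.

```python
import logging
import subprocess

def runshcommand(cmd):
    """Minimal sketch (assumed behavior): run a shell command and return its
    non-empty output lines as a list, or None if the command fails or produces
    no output. The project's real helper is not shown in this section."""
    try:
        out = subprocess.check_output(cmd, shell=True)
        lines = [line for line in out.decode('utf-8').splitlines() if line.strip()]
        return lines if lines else None
    except subprocess.CalledProcessError as e:
        logging.error(e)
        return None
```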
def get(self, project, filename):
    try:
        installm = installpmodel(self.get_secure_cookie("crtcluster"))
        packegpathm = packagepathmodel(self.get_secure_cookie("crtcluster"))
        # read the install project
        chooseinstall = installm.installproject
        packetsp = packegpathm.packagepath
        if filename.split('.')[-1] == 'xml':
            # XML configuration files are returned as a JSON property map
            self.set_header('Content-Type', 'application/json')
            confpath = packetsp[chooseinstall[-1]][project].split(',')[-1] + '/' + filename
            con = HadoopConf(confpath)
            dt = con.get()
            self.write(json.dumps(dt))
        else:
            # any other file is returned as plain text
            self.set_header('Content-Type', 'application/text')
            datapath = packetsp[chooseinstall[-1]][project].split(',')[-1] + '/' + filename
            with open(datapath, 'r') as f:
                self.write(f.read())
    except Exception as e:
        logging.error(e)
        # report the failure instead of returning an empty 200 response
        self.set_status(500)
        self.write(str(e))
def get(self, filename):
    self.set_header("Content-Type", "application/json")
    paths = 'static/configure/' + filename
    con = HadoopConf(paths)
    dt = con.get()
    self.write(json.dumps(dt))
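All of these handlers go through `HadoopConf`, whose implementation is outside this section. Judging from the calls used here, `get()` returns a `{name: value}` dict and `setdt2(data)` applies such a dict to the file. The sketch below is only an assumed minimal version built on the standard Hadoop `*-site.xml` layout, not the project's actual class.

```python
import xml.etree.ElementTree as ET

class HadoopConf(object):
    """Minimal sketch of the conf helper used above, assuming the standard
    Hadoop <configuration><property><name/><value/></property></configuration>
    layout; the project's real HadoopConf is not shown in this section."""

    def __init__(self, path):
        self.path = path

    def get(self):
        # Return {property name: value} for every <property> in the file.
        root = ET.parse(self.path).getroot()
        return {p.findtext('name'): p.findtext('value', default='')
                for p in root.findall('property')}

    def setdt2(self, data):
        # Apply a dict of {name: value}: update existing properties, append
        # any that are missing, then write the file back in place.
        tree = ET.parse(self.path)
        root = tree.getroot()
        existing = {p.findtext('name'): p for p in root.findall('property')}
        for name, value in data.items():
            prop = existing.get(name)
            if prop is None:
                prop = ET.SubElement(root, 'property')
                ET.SubElement(prop, 'name').text = name
                ET.SubElement(prop, 'value').text = value
            else:
                prop.find('value').text = value
        tree.write(self.path)
```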