def on_btn_joblist_download_clicked(self, b):
    index = self.hginterface.grid_joblist.get_selected_rows()
    if len(index) == 0:
        return
    index = index[0]
    jobid = self.hginterface.grid_joblist.df.iloc[[index]].index[0]
    item = self.joblist_df.loc[jobid]
    if item.jobstatus != 'Done':
        return
    filename = "Result" + str(jobid) + '(' + item.shapeName + ',' + item.dateFrom + '~' + item.dateTo + ').csv'
    filename = filename.replace('/', '-')
    jobid = str(jobid)
    ret_path = self.WORKING_DIR + '/job/' + jobid + '/results/'
    csv_filename = ''
    if item.dataType == 'SMAP':
        csv_filename = str(os.path.basename(glob.glob(ret_path + '*.zip')[0]))
    else:
        csv_filename = str(os.path.basename(glob.glob(ret_path + '*.csv')[0]))
    # with open(csv_filename, 'r') as f:
    #     data = f.read()
    # HGUtil.downloadToClient(data, csv_filename)
    HGUtil.startDownloadToClient('./workingdir/job/' + jobid + '/results/' + csv_filename)
def deleteJob(self, jobid):
    # get submit id
    job_info = self.db.getJobInfo(jobid)
    if job_info is None:
        return
    # delete job files
    HGUtil.rmdir(self.JOB_BASEDIR + '/' + str(jobid))
    # delete job from DB
    self.db.deleteJob(jobid)
def updateJobStatus(self, jobid, status, submit_id):
    # use a parameterized query so status/jobid values are escaped by sqlite3
    sql = 'update HGJobs set jobstatus = ? where jobid = ?;'
    conn = sqlite3.connect(self.DB_FILE)
    conn.execute(sql, (status, str(jobid)))
    conn.commit()
    conn.close()
    param = {}
    param['submitId'] = submit_id
    HGUtil.sendLog(HGLOG.JOB_STATUS, param, status)
    return True
def initTable(self):
    sql = '''
        CREATE TABLE HGJobs (
            jobid INTEGER PRIMARY KEY AUTOINCREMENT,
            submitId TEXT,
            fileName TEXT,
            shapeName TEXT,
            dataType TEXT,
            dateFrom TEXT,
            dateTo TEXT,
            submitTime TEXT,
            jobstatus TEXT,
            jobname TEXT,
            joblog TEXT
        );
    '''
    # create job folders first
    HGUtil.mkdir(HGUtil.WORKING_DIR)
    HGUtil.mkdir(HGUtil.WORKING_DIR + '/job')
    conn = sqlite3.connect(self.DB_FILE)
    conn.execute(sql)
    conn.commit()  # commit explicitly so the new table persists regardless of isolation level
    conn.close()
def main(self):
    warnings.filterwarnings('ignore')
    #self.hginterface.show_init_loading()
    start_t = time.time()
    #return
    # HGUtil.getNBBaseUrl()

    # disable Jupyter autosave
    display(Javascript('Jupyter.notebook.set_autosave_interval(0);'))

    # create base folders
    HGUtil.mkdir(self.WORKING_DIR)
    HGUtil.mkdir(self.WORKING_DIR + "/upload")  # for user-uploaded zip file
    HGUtil.mkdir(self.WORKING_DIR + "/job")     # for job submission and results

    # create link to working dir
    # HGUtil.createWorkLink()

    # create interfaces
    self.hginterface.init()
    self.hginterface.file_widget.cb = self.file_loaded
    self.hginterface.fetchDataButton.on_click(self.on_fetch_button_clicked)
    self.hginterface.visData.observe(self.showVisDataInformation, "value")
    self.hginterface.btn_vis_del.on_click(self.on_vis_del_button_clicked)
    self.hginterface.btn_vis_vis.on_click(self.on_vis_vis_button_clicked)
    self.updateVisData()
    self.hginterface.sortCheckBox.observe(self.toggleSort, "value")
    self.hginterface.btn_joblist_download.on_click(self.on_btn_joblist_download_clicked)
    self.hginterface.btn_joblist_delete.on_click(self.on_btn_joblist_delete_clicked)
    self.hginterface.btn_joblist_refresh.on_click(self.on_btn_joblist_refresh_clicked)
    self.hginterface.btn_joblist_visualize.on_click(self.on_btn_joblist_vis_clicked)
    self.hginterface.btn_newjob_reupload.on_click(self.on_btn_newjob_reupload_clicked)
    self.hginterface.dataType.observe(self.onDataTypeChanged)
    self.hginterface.tab.observe(self.tabChanged, "selected_index")
    HGUtil.JOBLOG_FUNC = self.hginterface.print_joblog
    HGUtil.HG_INTERFACE = self.hginterface
    self.hginterface.hideEditAppBtn()
    self.initialized = True
def unzip_and_checkfiles(self, path, file_name):
    mandatory = ['shp', 'dbf', 'prj', 'shx']
    shape_name = None
    #fname_only = os.path.splitext(file_name)[0]
    fname_only = file_name.split('.zip')[0]
    os.system('rm -rf ' + path + fname_only)
    HGUtil.mkdir(path + fname_only)
    HGUtil.mkdir(path + fname_only + '/temp')
    os.system('unzip ' + path + file_name + ' -d ' + path + fname_only + '/temp')
    for files in os.walk(path + fname_only + '/temp'):
        check_cnt = 0
        for file in files[2]:
            if file[0] == '.':
                continue
            filename, file_extension = os.path.splitext(file)
            #print filename, file_extension
            if file_extension[1:] in mandatory:
                shape_name = filename
                check_cnt += 1
        if check_cnt == len(mandatory):
            HGUtil.mkdir(path + fname_only + '/shape')
            os.system('mv ' + files[0] + '/* ' + path + fname_only + '/shape/.')
            os.system('rm -rf ' + path + fname_only + '/temp')
            # get shape information
            shape_bound = HGUtil.getShapeBound(path + fname_only + '/shape/' + shape_name + '.shp')
            self.hginterface.draw_preview(shape_bound)
            return shape_name
    # failed. Delete temp folders
    os.system('rm -rf ' + path + fname_only)
    return None
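# A minimal sketch (not part of the original module) of how the shell-based
# unzip/validate step in unzip_and_checkfiles could be done with the standard
# library instead of os.system; the function name 'extract_and_find_shape' and
# its parameters are illustrative assumptions, not existing code.
import os
import zipfile

def extract_and_find_shape(zip_path, extract_dir, required_exts=('shp', 'dbf', 'prj', 'shx')):
    # Extract the archive, then look for a shapefile base name that has all
    # mandatory components (.shp, .dbf, .prj, .shx) somewhere under extract_dir.
    with zipfile.ZipFile(zip_path) as zf:
        zf.extractall(extract_dir)
    found = {}
    for root, _, files in os.walk(extract_dir):
        for f in files:
            if f.startswith('.'):
                continue
            base, ext = os.path.splitext(f)
            found.setdefault(ext.lstrip('.').lower(), base)
    if all(ext in found for ext in required_exts):
        return found['shp']
    return None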
def print(*args, **kw):
    # forward print() calls to the job log widget via HGUtil.JOBLOG_FUNC
    HGUtil.JOBLOG_FUNC(*args, **kw)
def submitJob(self):
    # get new job id from DB
    jobid = self.db.createNewJob()
    if jobid is None:
        return False, 'Job creation failure'
    #jobid = '1'

    # create job folders
    jobdir = self.JOB_BASEDIR + jobid
    HGUtil.rmdir(jobdir)
    HGUtil.mkdir(jobdir)
    # cd to job dir since the 'submit' script returns the job result here
    os.chdir(jobdir)
    waltime = self.DEFAULT_WALTIME
    HGUtil.mkdir('shape')
    upload_dir = self.INPUT_BASEDIR + self.params['input_fname'] + '/shape/'
    input_dir = self.JOB_BASEDIR + jobid + '/shape'
    # print('cp "' + upload_dir + '"* "' + input_dir + '".')
    os.system('cp "' + upload_dir + '"* "' + input_dir + '/".')
    shape_filename = str(os.path.basename(glob.glob(input_dir + '/*.shp')[0]))

    # update db
    params = {}
    params['jobname'] = self.params['jobname']
    params['fileName'] = self.params['input_fname']
    params['shapeName'] = shape_filename[:-4]
    if self.params['data_type'] == 'GPM':
        self.params['data_type'] = 'GPM (' + self.params['temporal_res'] + ')'
    params['dataType'] = self.params['data_type']
    params['dateFrom'] = self.params['data_from']
    params['dateTo'] = self.params['data_to']
    ret = self.db.updateJobInfo(jobid, params)

    # submit job
    jobcmd = 'submit -w ' + str(waltime) + \
             ' -v ssg-workq@halstead --detach' + \
             ' --inputfile ' + input_dir + \
             ' ' + self.TOOL_REV + \
             ' -r "' + self.params['data_type'] + '"' + \
             ' -df ' + self.params['data_from'] + \
             ' -dt ' + self.params['data_to'] + \
             ' -f ' + shape_filename
    # self.print_joblog('\n> Job submission command : ')
    # self.print_joblog(jobcmd)
    # return True, ''
    submitHydroglobe.run(repo=self.params['data_type'],
                         dateFrom=self.params['data_from'],
                         dateTo=self.params['data_to'],
                         shapeFileName=shape_filename,
                         currentDir=jobdir)
    #return True, ''

    # get job submission id
    submit_id = '1234'
    # self.print_joblog('\n> Job has been submitted successfully. Cluster job submission id : ' + submit_id)
    params = {}
    params['submitId'] = submit_id
    if len(glob.glob(HGUtil.WORKING_DIR + '/job/' + str(jobid) + '/results/*.csv')) > 0:
        params['jobstatus'] = 'Done'
    else:
        params['jobstatus'] = 'Failed'
    params['joblog'] = HGUtil.HG_INTERFACE.get_joblog()
    self.db.updateJobInfo(jobid, params)
    return True, ''
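# A minimal sketch (not the path taken above, where submitHydroglobe.run is
# called and submit_id is hard-coded to '1234') of how the constructed 'jobcmd'
# string could be launched with subprocess so a real submission id could be
# parsed from the tool's output; 'run_submit_command' is an illustrative helper,
# and the output format of the cluster 'submit' tool is an assumption.
import shlex
import subprocess

def run_submit_command(jobcmd):
    # Run the submit command and hand back (returncode, stdout) so the caller
    # can extract a submission id if the tool prints one.
    proc = subprocess.run(shlex.split(jobcmd), capture_output=True, text=True)
    return proc.returncode, proc.stdout.strip()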