# Ad-hoc driver script for manually exercising libpredweb helper functions.
# Each snippet is gated by `if 0:` / `if 1:` so individual tests can be
# toggled on by hand; the `#{{{ ... #}}}` markers are vim fold markers.
import os
import sys
from libpredweb import myfunc
from libpredweb import webserver_common as webcom

# Directory of this script and the project base directory one level up.
rundir = os.path.dirname(__file__)
basedir = os.path.realpath("%s/../" % (rundir))

if 0:  #{{{
    # Test myfunc.ReadIDList2: read column 2 of an ID list file given on argv.
    infile = sys.argv[1]
    li = myfunc.ReadIDList2(infile, 2, None)
    print(li)
#}}}
if 0:  #{{{
    # Test myfunc.ReadFastaFromBuffer on assorted malformed FASTA buffers.
    rawseq = ">1\nseqAAAAAAAAAAAAAAAAAAAAAAAAA\n \n>2 dad\ndfasdf "
    #rawseq = " >1\nskdfaskldgasdk\nf\ndadfa\n\n\nadsad \n"
    #rawseq = ">sadjfasdkjfsalkdfsadfjasdfk"
    rawseq = "asdkfjasdg asdkfasdf\n"
    seqRecordList = []
    myfunc.ReadFastaFromBuffer(rawseq, seqRecordList, True, 0, 0)
    print(seqRecordList)
#}}}
if 0:  #{{{
    # Test myfunc.Size_byte2human: convert a byte count from argv to a
    # human-readable size string.
    size = float(sys.argv[1])
    print("size=", size)
    print("humansize=", myfunc.Size_byte2human(size))
#}}}
if 1:
    # NOTE(review): this branch may be truncated here — the original `if 1:`
    # body likely continues beyond this chunk; confirm against the full file.
    newsfile = "%s/static/doc/news.txt" % (basedir)
def main(g_params):  #{{{
    """Run the front-end job-queue daemon loop.

    Repeatedly: reloads the JSON config and black-IP list, reads the list of
    available compute nodes, performs periodic housekeeping (statistics, old
    result deletion, log archiving), then walks the run-job log and submits /
    collects / finalizes each job via the qdcom helpers. Sleeps
    g_params['SLEEP_INTERVAL'] seconds between iterations.

    Relies on module-level configuration (path_log, path_result, path_cache,
    basedir, gen_logfile, qdcom, webcom, myfunc, ...) defined elsewhere in
    this file. Returns 0 (in practice the loop runs until the process is
    killed).
    """
    submitjoblogfile = "%s/submitted_seq.log" % (path_log)
    runjoblogfile = "%s/runjob_log.log" % (path_log)
    finishedjoblogfile = "%s/finished_job.log" % (path_log)

    if not os.path.exists(path_cache):
        os.mkdir(path_cache)

    loop = 0
    while 1:
        # Pause while cache cleaning is in progress.
        # FIX: the original `continue` had no sleep, busy-spinning at 100% CPU
        # for the whole duration of the cleaning; back off before re-checking.
        if os.path.exists("%s/CACHE_CLEANING_IN_PROGRESS" % (path_result)):
            time.sleep(g_params['SLEEP_INTERVAL'])
            continue

        # Reload the config file on every iteration so settings can be
        # changed without restarting the daemon.
        configfile = "%s/config/config.json" % (basedir)
        config = {}
        if os.path.exists(configfile):
            text = myfunc.ReadFile(configfile)
            config = json.loads(text)
        if rootname_progname in config:
            g_params.update(config[rootname_progname])

        if os.path.exists(black_iplist_file):
            g_params['blackiplist'] = myfunc.ReadIDList(black_iplist_file)

        os.environ['TZ'] = g_params['TZ']
        time.tzset()

        avail_computenode = webcom.ReadComputeNode(computenodefile)  # return value is a dict
        g_params['vip_user_list'] = myfunc.ReadIDList2(vip_email_file, col=0)
        num_avail_node = len(avail_computenode)

        webcom.loginfo("loop %d" % (loop), gen_logfile)

        # Periodic housekeeping: statistics, deletion of expired result
        # directories and cleanup of static files, once every
        # STATUS_UPDATE_FREQUENCY[0] loops (phase STATUS_UPDATE_FREQUENCY[1]).
        isOldRstdirDeleted = False
        if loop % g_params['STATUS_UPDATE_FREQUENCY'][0] == g_params['STATUS_UPDATE_FREQUENCY'][1]:
            qdcom.RunStatistics_basic(webserver_root, gen_logfile, gen_errfile)
            isOldRstdirDeleted = webcom.DeleteOldResult(
                path_result, path_log, gen_logfile,
                MAX_KEEP_DAYS=g_params['MAX_KEEP_DAYS'])
            webcom.CleanServerFile(path_static, gen_logfile, gen_errfile)

        if 'DEBUG_ARCHIVE' in g_params and g_params['DEBUG_ARCHIVE']:
            webcom.loginfo(
                "Run ArchiveLogFile, path_log=%s, threshold_logfilesize=%d" % (
                    path_log, threshold_logfilesize), gen_logfile)
        # NOTE(review): archiving is run unconditionally; DEBUG_ARCHIVE only
        # controls the log message — confirm against upstream intent.
        webcom.ArchiveLogFile(path_log, threshold_logfilesize=threshold_logfilesize)

        qdcom.CreateRunJoblog(loop, isOldRstdirDeleted, g_params)

        # Get number of jobs submitted to the remote server based on the
        # runjoblogfile: collect, per node, the remote job ids currently
        # queued or running.
        runjobidlist = myfunc.ReadIDList2(runjoblogfile, 0)
        remotequeueDict = {}
        for node in avail_computenode:
            remotequeueDict[node] = []
        for jobid in runjobidlist:
            rstdir = "%s/%s" % (path_result, jobid)
            remotequeue_idx_file = "%s/remotequeue_seqindex.txt" % (rstdir)
            if os.path.exists(remotequeue_idx_file):
                content = myfunc.ReadFile(remotequeue_idx_file)
                lines = content.split('\n')
                for line in lines:
                    strs = line.split('\t')
                    if len(strs) >= 5:
                        node = strs[1]
                        remotejobid = strs[2]
                        if node in remotequeueDict:
                            remotequeueDict[node].append(remotejobid)

        # format of cntSubmitJobDict {'node_ip': [INT, INT, STR]}
        # i.e. [num_jobs_in_remote_queue, max_jobs_per_node, queue_method]
        cntSubmitJobDict = {}
        for node in avail_computenode:
            queue_method = avail_computenode[node]['queue_method']
            num_queue_job = len(remotequeueDict[node])
            # FIX: the original guarded on `num_queue_job >= 0`, which is
            # always true for len() — the else branch was dead code and has
            # been removed.
            cntSubmitJobDict[node] = [
                num_queue_job, g_params['MAX_SUBMIT_JOB_PER_NODE'], queue_method]

        # Entries in runjoblogfile include jobs in queue or running; process
        # the file block-by-block to bound memory use.
        hdl = myfunc.ReadLineByBlock(runjoblogfile)
        if not hdl.failure:
            lines = hdl.readlines()
            while lines is not None:
                for line in lines:
                    strs = line.split("\t")
                    if len(strs) >= 11:
                        jobid = strs[0]
                        email = strs[4]
                        # FIX: narrowed the original bare `except:` clauses,
                        # which would also swallow KeyboardInterrupt/SystemExit.
                        try:
                            numseq = int(strs[5])
                        except ValueError:
                            numseq = 1
                        try:
                            numseq_this_user = int(strs[10])
                        except ValueError:
                            numseq_this_user = 1
                        rstdir = "%s/%s" % (path_result, jobid)
                        finishtagfile = "%s/%s" % (rstdir, "runjob.finish")
                        status = strs[1]
                        webcom.loginfo(
                            "CompNodeStatus: %s" % (str(cntSubmitJobDict)),
                            gen_logfile)
                        # Skip jobs that are locked by another worker.
                        runjob_lockfile = "%s/%s/%s.lock" % (
                            path_result, jobid, "runjob.lock")
                        if os.path.exists(runjob_lockfile):
                            msg = "runjob_lockfile %s exists, ignore the job %s" % (
                                runjob_lockfile, jobid)
                            webcom.loginfo(msg, gen_logfile)
                            continue
                        #if IsHaveAvailNode(cntSubmitJobDict):
                        if not g_params['DEBUG_NO_SUBMIT']:
                            qdcom.SubmitJob(
                                jobid, cntSubmitJobDict, numseq_this_user,
                                g_params)
                        qdcom.GetResult(jobid, g_params)  # the start tagfile is written when got the first result
                        qdcom.CheckIfJobFinished(jobid, numseq, email, g_params)
                lines = hdl.readlines()
            hdl.close()

        myfunc.WriteFile(
            "sleep for %d seconds\n" % (g_params['SLEEP_INTERVAL']),
            gen_logfile, "a", True)
        time.sleep(g_params['SLEEP_INTERVAL'])
        loop += 1
    return 0
""" # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.realpath(__file__)) from libpredweb import myfunc from libpredweb import webserver_common as webcom try: from .shared_settings import * except ImportError: pass with open('/etc/django_pro_secret_key.txt') as f: SECRET_KEY = f.read().strip() # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False ALLOWED_HOSTS = [] allowed_host_file = "%s/allowed_host_pro.txt"%(BASE_DIR) computenodefile = "%s/pred/config/computenode.txt"%(BASE_DIR) for f in [allowed_host_file, computenodefile]: if os.path.exists(f): ALLOWED_HOSTS += myfunc.ReadIDList2(f,col=0) # add also the host ip address hostip = webcom.get_external_ip() ALLOWED_HOSTS.append(hostip)