def __make_common_files(command_line, codegen_service):
    """Generate the files that are common to both the client and server."""
    # (template, relative output path) pairs emitted into every project.
    shared_templates = [
        (project(), ".project"),
        (properties(), "project.properties"),
        (AndroidManifest(), "AndroidManifest.xml"),
        (AndroidStrings(), os.path.normpath("res/values/strings.xml")),
        (AndroidLayoutMain(), os.path.normpath("res/layout/main.xml")),
        (classpath(), ".classpath"),
    ]
    # The Service project is skipped when only a client was requested.
    target_projects = ["Client"]
    if not command_line.client_only:
        target_projects.append("Service")
    for project_name in target_projects:
        for tmpl, rel_path in shared_templates:
            tmpl.client_or_service = project_name
            __make_target_file(tmpl, os.path.join(project_name, rel_path),
                               command_line, codegen_service)
    # The two main classes live under src/<well-known-name>/.
    package_path = __get_well_known_name_path(command_line)
    service_file = os.path.join("Service", "src", package_path, "ServiceMain.java")
    __make_target_file(ServiceMain(), service_file, command_line, codegen_service)
    client_file = os.path.join("Client", "src", package_path, "ClientMain.java")
    __make_target_file(ClientMain(), client_file, command_line, codegen_service)
def get(self):
    """Locate the properties file, falling back to a per-user ~/.shredder/
    copy when the bundled resource is not writable, then load it."""
    self.props_file = self.util.get_resource(self.props_fname)
    if not os.access(self.props_file, os.W_OK):
        # Resource not writable in place: use (creating if needed) a
        # per-user directory instead.
        shome = os.getenv("HOME") + "/.shredder/"
        if not os.path.exists(shome):
            os.makedirs(shome)
        self.props_file = shome + self.props_fname
    self.props = properties(self.props_file)
    # NOTE(review): translate() runs over the entries BEFORE read() is
    # called — presumably properties() parses the file on construction and
    # read() does something else; confirm against the properties class.
    for i in range(0, self.props.size()):
        self.translate(self.props.get(i))
    self.props.read()
def __init__(self):
    # Algorithm parameters.
    self.parametry = properties()
    self.usedTerminals = []
    # Grammar as a dictionary of CNF-style productions (nonterminal ->
    # list of classifier(rhs, lhs)).
    #self.G = {"S":[classifier("AB","S"),classifier("AC","S")],"A":[classifier("BA","A"),classifier("a","A")],"B":[classifier("CC","B"),classifier("b","B")],"C":[classifier("AB","C"),classifier("a","C")]}
    self.G = {"S":[classifier("AB","S"),classifier("AC","S")],"C":[classifier("SB","C"),classifier("a","C")],"B":[classifier("BB","B"),classifier("b","B")],"A":[classifier("a","A")]}
    # Count the two-symbol (non-terminal) productions.
    # NOTE(review): this loop tests len(y.name), while the otherwise
    # identical initializer elsewhere in this file tests len(y.right) —
    # confirm which classifier attribute holds the right-hand side.
    self.classifierNum = 0
    for x in self.G:
        for y in self.G[x]:
            if len(y.name) > 1:
                self.classifierNum += 1
def get_whd(xml):
    """Choose a properties object based on the wh-word ("whd" relation)
    found in the parse tree: wanneer -> Wanneer, waar -> Waar, else default."""
    whd_nodes = xml.xpath('.//node[@rel="whd"]')
    words = [tree_yield(node).lower() for node in whd_nodes]
    if "wanneer" in words:
        return properties.propertiesWanneer()
    if "waar" in words:
        return properties.propertiesWaar()
    return properties.properties()
def __init__(self):
    # Create the parameters instance.
    self.parametry = properties()
    self.allowFulCover = 1
    # Grammar as a dictionary of CNF-style productions.
    #self.G = {"S":[classifier("AB","S"),classifier("AC","S")],"A":[classifier("BA","A"),classifier("a","A")],"B":[classifier("CC","B"),classifier("b","B")],"C":[classifier("AB","C"),classifier("a","C")]}
    self.G = {"S":[classifier("AB","S"),classifier("AC","S")],"C":[classifier("SB","C"),classifier("a","C")],"B":[classifier("BB","B"),classifier("b","B")],"A":[classifier("a","A")]}
    # Independent backup copy (separate classifier instances) used to
    # restore the grammar later.
    self.G_backUp = {"S":[classifier("AB","S"),classifier("AC","S")],"C":[classifier("SB","C"),classifier("a","C")],"B":[classifier("BB","B"),classifier("b","B")],"A":[classifier("a","A")]}
    self.classifierNum = 0
    self.allowCover = 1
    # Count productions whose right-hand side has more than one symbol.
    for x in self.G:
        for y in self.G[x]:
            if len(y.right) > 1:
                self.classifierNum += 1
def __init__(self, parent=None):
    """Build the GCS main window, create the helper objects and wire up
    the UI signal/slot connections."""
    QtGui.QWidget.__init__(self, parent)
    self.dialogTasks = tasks_generator(self)
    self.ui = Ui_GCS()
    self.parametry = properties()
    self.testingWords = []
    self.ui.setupUi(self)
    self.FLCS = funnction_FLSC(self)
    # Object handling the pop-ups:
    self.popUp = popUp(self)
    self.genetyk = genetyk(self.FLCS.getGrammar(), self.parametry)
    self.fileIndex = 0
    #self.ui.window_CYK.addTab(self.genTabCYK())
    #print "tutaj gramtyka na okno idzie:"
    self.ui.window_grammar.setText(self.FLCS.getGrammar().getGramarStr())
    # Add the extra combo boxes to the window:
    self.addMembershipFunctionBox()
    self.addFuzzyFunctionBox()
    # Signal/slot connections (old-style PyQt4 API):
    # start the algorithm:
    QtCore.QObject.connect(self.ui.start_GLBS, QtCore.SIGNAL("clicked()"), self.genTabCYK)
    # open a file
    QtCore.QObject.connect(self.ui.menuOpcje, QtCore.SIGNAL("clicked()"), self.openFile)
    # test button (next step)
    QtCore.QObject.connect(self.ui.change_word, QtCore.SIGNAL("clicked()"), self.testy)
    # next step
    #QtCore.QObject.connect(self.ui.change_word,QtCore.SIGNAL("clicked()"),self.readLine)
    # fetch the values from the parameters form
    QtCore.QObject.connect(self.ui.propertiesAccept, QtCore.SIGNAL("clicked()"), self.getProperties)
    # button generating training sentences:
    QtCore.QObject.connect(self.ui.create_test, QtCore.SIGNAL("clicked()"), self.generate_tasks)
    # cancel the entered preferences
    QtCore.QObject.connect(self.ui.propertiesCancel, QtCore.SIGNAL("clicked()"), self.cancelProperties)
    #QtCore.QObject.connect(self.ui.change_word,QtCore.SIGNAL("clicked()"),self.readLine)
    self.cancelProperties()
    self.getProperties()
    # open a file:
    QtCore.QObject.connect(self.ui.open_file, QtCore.SIGNAL("clicked()"), self.openFile)
def __init__(self):
    """Parse the command line, load the properties stack, build the cluster
    info, and open the log file when logging was requested."""
    self.args = argparser()
    # base.props is always loaded; a user-supplied file is layered on top.
    prop_sources = ['base.props']
    if self.args.props_file:
        prop_sources.append(self.args.props_file)
    self.props = properties(*prop_sources, cmd_args=self.args.extra_props)
    cluster.make_cluster_info(self.props)
    self.bubble_thread = None
    # Logging is enabled when either a path or an explicit name was given.
    if not (self.args.log_path or self.args.output):
        self.log_path = None
        self.log_filename = None
        self.log_file = None
        return
    path = self.args.log_path or '.'
    if not path.endswith('/'):
        path += '/'
    name = self.args.output
    if not name:
        # Default name carries a millisecond-resolution timestamp.
        stamp = datetime.now().strftime('%Y%m%dT%H%M%S_%f')[:-3]
        name = 'epidemia_{}'.format(stamp)
    self.log_path = path
    self.log_filename = name
    self.log_file = open(f'{path}{name}.log', 'w')
def __init__(self, parent=None):
    """Build the GCS main window and wire up the UI signal connections.
    (Python 2: uses the print statement and old-style PyQt4 signals.)"""
    QtGui.QWidget.__init__(self, parent)
    self.dialogTasks = tasks_generator(self)
    self.ui = Ui_GCS()
    self.parametry = properties()
    self.testingWords = []
    self.ui.setupUi(self)
    self.G = gramatyka()
    self.genetyk = genetyk(self.G, self.parametry)
    self.fileIndex = 0
    #self.ui.window_CYK.addTab(self.genTabCYK())
    print "tutaj gramtyka na okno idzie:"
    self.ui.window_grammar.setText(self.G.getGramarStr())
    # Signal/slot connections:
    # start the algorithm:
    QtCore.QObject.connect(self.ui.start_GLBS, QtCore.SIGNAL("clicked()"), self.genTabCYK)
    # open a file
    QtCore.QObject.connect(self.ui.menuOpcje, QtCore.SIGNAL("clicked()"), self.openFile)
    # empty button for now
    #QtCore.QObject.connect(self.ui.change_word,QtCore.SIGNAL("clicked()"),self.changeWord)
    # open a file:
    QtCore.QObject.connect(self.ui.open_file, QtCore.SIGNAL("clicked()"), self.openFile)
    # next step
    QtCore.QObject.connect(self.ui.change_word, QtCore.SIGNAL("clicked()"), self.readLine)
    # fetch the values from the parameters form (still has some gaps!)
    QtCore.QObject.connect(self.ui.propertiesAccept, QtCore.SIGNAL("clicked()"), self.getProperties)
    # button generating training sentences:
    QtCore.QObject.connect(self.ui.create_test, QtCore.SIGNAL("clicked()"), self.generate_tasks)
    # cancel the entered preferences
    QtCore.QObject.connect(self.ui.propertiesCancel, QtCore.SIGNAL("clicked()"), self.cancelProperties)
    #QtCore.QObject.connect(self.ui.change_word,QtCore.SIGNAL("clicked()"),self.readLine)
    self.cancelProperties()
def __make_common_files(command_line, codegen_service): """Generate the files that are common to both the client and server.""" # The first entry is the template. # The second entry is the name of the file being created. common_files = [ [project(), ".project"], [properties(), "project.properties"], [AndroidManifest(), "AndroidManifest.xml"], [AndroidStrings(), os.path.normpath("res/values/strings.xml")], [AndroidLayoutMain(), os.path.normpath("res/layout/main.xml")], [classpath(), ".classpath"] ] projects = ["Client"] if not command_line.client_only: projects.append("Service") for client_or_service_string in projects: for template, subfilename in common_files: template.client_or_service = client_or_service_string filename = os.path.join(client_or_service_string, subfilename) __make_target_file(template, filename, command_line, codegen_service) temp = ServiceMain() sub_path0 = "src" sub_path1 = __get_well_known_name_path(command_line) filename = os.path.join("Service", sub_path0, sub_path1, "ServiceMain.java") __make_target_file(temp, filename, command_line, codegen_service) temp = ClientMain() filename = os.path.join("Client", sub_path0, sub_path1, "ClientMain.java") __make_target_file(temp, filename, command_line, codegen_service) return
#!/usr/local/bin/python # Check if everything in data_path is processed import os, sys, string, glob from properties import properties data_path = properties('data_path') model_path = properties('model_path') eval_path = properties('eval_path') for dataset in os.listdir(data_path): if "Strain" in dataset: continue src = data_path + "/" + dataset + "/" for dbfile in glob.glob(src +"/????.sqlite"): stationID = string.split(dbfile, "/")[-1][:-7] model_dest = model_path + "/" + dataset + "/" + "daily_project_" + stationID + "/*.Q" eval_dest = eval_path + "/" + dataset + "/" + "daily_project_" + stationID + "*" #if not os.path.exists(model_dest): if glob.glob(model_dest) == []: print model_dest if glob.glob(eval_dest) == []: print eval_dest
#!/usr/local/bin/python #========================================================================== # Ingest, and execute rdahmm evaluation for UNR datasets # Set up a cron job to run nightly # # usage: cron_rdahmm_unr.py # #=========================================================================== import os, subprocess, sys from threading import Thread from properties import properties unr_cmd = properties('script_path') + "/unr_ingest_single.py" eval_cmd = properties('script_path') + "/rdahmm_eval_single.py" xml_cmd = properties('script_path') + "/create_summary_xmls.py" json_cmd = properties('script_path') + "/create_summary_jsons.py" class ThreadJob(Thread): def __init__(self, dataset): Thread.__init__(self) self.source = dataset self.dataset = "UNR_" + dataset.upper() def run(self): # ingest a given dataset: igs08 | fid print "+++Starting process UNR ", self.source, " ..." cmd = unr_cmd p = subprocess.Popen([cmd, self.source], stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() if p.returncode != 0:
# (Fragment: the os/sys imports of this script appear earlier in the file.)
from datetime import date
from datetime import timedelta
from properties import properties

numargv = len(sys.argv)
if numargv == 1:
    sys.exit("usage: scripps_ingest_single.py /path/to/scripps_data.tar")
elif numargv == 2:
    # Split the tarball argument into its directory and file name.
    [scripps_path, tarfile] = os.path.split(sys.argv[1])
    scripps_path += "/"
else:
    sys.exit("Invalid number of parameters!")

#rdahmm_path = "/home/yuma/RDAHMM/Data/"
#temp_path = "/home/yuma/RDAHMM/TEMP/"
data_path = properties('data_path')
temp_path = properties('temp_path')

# Dataset directory: tarball name minus its trailing "_<timestamp>".
datadir = data_path + tarfile[:tarfile.rfind("_")] + "/"
#dbfile = datadir + tarfile[:-4] + ".sqlite"
# get rid of timestamp from db file name
dbfile = datadir + tarfile[:-13] + ".sqlite"
workdir = temp_path + tarfile[:tarfile.rfind("_")] + "/"
#print datadir, dbfile
if not os.path.exists(datadir):
    cmd = "mkdir -p " + datadir
    os.system(cmd)
if not os.path.exists(workdir):
    cmd = "mkdir -p " + workdir
    os.system(cmd)
class EModel:
    """State model for a bank of 5 elevators serving 20 floors.

    Hall calls (_up/_down, one flag per floor) and cabin calls (_goto, one
    flag per elevator per floor) are tracked here; movement is delegated to
    the EElevator instances."""

    def __init__(self):
        self._elev = [EElevator(eid) for eid in range(5)]
        # Pending hall calls per floor (up/down request buttons).
        self._up = [0 for i in range(20)]
        self._down = [0 for i in range(20)]
        # Pending cabin (goto) requests, per elevator per floor.
        self._goto = [[0 for i in range(20)] for eid in range(5)]

    def __getstatus(self, e):
        # Derive elevator e's state from its flags and planned route.
        elev = self._elev[e]
        if elev.disable:
            return state_t.STAT_DISABLED
        elif elev.timeout:
            return state_t.STAT_DOCKING  # docking
        elif len(elev.route) <= 1:
            return state_t.STAT_FREE  # free
        elif elev.route[0][0] < elev.route[1][0]:
            return state_t.STAT_UP  # up
        else:
            return state_t.STAT_DOWN  # down

    def __setdisable(self, key, value):
        self._elev[key].disable = value

    def __setwaiting(self, key, value):
        # Waiting time may only be set while the car is free or docking.
        status = self.status[key]
        if status == state_t.STAT_FREE or status == state_t.STAT_DOCKING:
            self._elev[key].timeout = value

    def __outRequest(self, reqpos, reqdir):
        '''Handle a hall call at floor reqpos; reqdir == REQ_UP | REQ_DOWN.'''
        _updown = self._up if reqdir == req_t.REQ_UP else self._down
        if not _updown[reqpos]:
            _updown[reqpos] = 1
            # dispatch: pick the closest enabled elevator.
            mindist, u = 10000, None
            for i in range(5):
                if self._elev[i].disable:
                    continue
                dist = self._elev[i].distance(reqpos, reqdir)
                print(' - %d号电梯距离为%d' % (i + 1, dist))  # distance of elevator i+1
                if dist < mindist:
                    mindist, u = dist, i
            if u is None:
                # No elevator available: drop the request again.
                print(' -> 当前无电梯可用!')
                _updown[reqpos] = 0
                return
            print(' -> 选择%d号电梯' % (u + 1))  # elevator u+1 chosen
            self._elev[u].request(reqpos, reqdir)

    def __inRequest(self, e, i):
        # Handle a cabin call in elevator e for floor i.
        if not self._goto[e][i]:
            self._goto[e][i] = 1
            self._elev[e].request(i, req_t.REQ_NOSPEC)
            # Already at the requested floor: start the docking countdown.
            if self._elev[e].route[0][0] == i:
                self._elev[e].timeout = TIMEOUT

    # Properties
    # Up request button (per floor).
    up = properties(
        lambda self, key: self._up[key],
        lambda self, key, value: self.__outRequest(key, req_t.REQ_UP))
    # Down request button (per floor).
    down = properties(
        lambda self, key: self._down[key],
        lambda self, key, value: self.__outRequest(key, req_t.REQ_DOWN))
    # Current floor.
    level = properties(lambda self, key: self._elev[key].route[0][0])
    # Current elevator state.
    status = properties(lambda self, key: self.__getstatus(key))
    # Elevator waiting time.
    wait = properties(lambda self, key: self._elev[key].timeout,
                      lambda self, key, value: self.__setwaiting(key, value))
    # Whether the elevator is disabled.
    disable = properties(
        lambda self, key: self._elev[key].disable,
        lambda self, key, value: self.__setdisable(key, value))
    # goto = properties(lambda self, key: getattr(self, '_goto' + str(key)))
    _goto0 = properties(lambda self, key: self._goto[0][key],
                        lambda self, key, value: self.__inRequest(0, key))
    _goto1 = properties(lambda self, key: self._goto[1][key],
                        lambda self, key, value: self.__inRequest(1, key))
    _goto2 = properties(lambda self, key: self._goto[2][key],
                        lambda self, key, value: self.__inRequest(2, key))
    _goto3 = properties(lambda self, key: self._goto[3][key],
                        lambda self, key, value: self.__inRequest(3, key))
    _goto4 = properties(lambda self, key: self._goto[4][key],
                        lambda self, key, value: self.__inRequest(4, key))

    def update(self):
        '''Derive the next state from the current one: advance every enabled
        elevator one step and clear the requests it has just served.'''
        for i in range(5):
            elev = self._elev[i]
            if elev.disable:
                continue
            ret = elev.update()
            curpos = elev.route[0][0]
            # The cabin request for the current floor is now served.
            self._goto[i][curpos] = 0
            if self.status[i] == state_t.STAT_DOCKING:
                if len(elev.route) <= 1:
                    # No further stops: both hall calls here are served.
                    self._up[curpos] = 0
                    self._down[curpos] = 0
                elif curpos < elev.route[1][0]:
                    self._up[curpos] = 0
                elif curpos > elev.route[1][0]:
                    self._down[curpos] = 0
# NOTE(review): this chunk begins mid-method — the statements below are the
# tail of a file-name sanitizer whose "def" line is outside this view.
    data = data[:19] + '.txt'
    data = data.replace(' ', '_')
    data = data.replace('-', '_')
    data = data.replace(':', '_')
    return data

    def makePropertiesHeader(self):
        """Write the report header: the data file name followed by every
        parameter name=value pair from the parent's params object."""
        #self.parent.getParams().getAtributsString()
        params = self.parent.getParams()
        dict = params.__dict__
        tmp = ""
        self.addLine(str(self.fileData))
        for x in dict:
            # NOTE(review): "/n" looks like a typo for the newline escape
            # "\n" — as written, a literal slash-n is emitted.
            tmp += str(x) + " = " + str(dict[x]) + "/n"
        self.addLine(tmp)

    def addLine(self, line):
        # Append a raw line to the report file.
        self.raport.write(line)

    def close(self):
        self.raport.close()

if __name__ == "__main__":
    # Ad-hoc smoke test (Python 2 print statements).
    #cos = testReporter("assa")
    proper = properties()
    print proper.__dict__
    print open.__doc__
    print 'done'
# Entry point: instantiate the application object exported by the
# properties module.
from properties import properties

app = properties()
# Some useful global constants today = datetime.today() xmlDeclaration = '<?xml version="1.0" encoding="UTF-8"?>' serverName = "gf9.ucs.indiana.edu" #updateTime="2012-07-20T06:12:05" updateTime = str(today.strftime("%Y-%m-%dT%H:%M:%S")) beginDate = "1994-01-01" endDate = str(today.strftime("%Y-%m-%d")) centerLng = "-119.7713889" centerLat = "36.7477778" stationCount = "1532" # Used to separate parts of the station name SEPARATOR_CHARACTER = "_" NO_DATA_TIME_STAMP = "22:22:22" FINAL_PATH = properties('eval_path') xmlHeaderTemplate = """ <update-time>%s</update-time> <data-source>%s</data-source> <begin-date>%s</begin-date> <end-date>%s</end-date> <center-longitude>%s</center-longitude> <center-latitude>%s</center-latitude> <output-pattern> <server-url>http://%s/daily_rdahmmexec/daily/%s</server-url> <stateChangeNumTxtFile>stateChangeNums.txt</stateChangeNumTxtFile> <stateChangeNumJsInput>stateChangeNums.txt.jsi</stateChangeNumJsInput> <allStationInputName>all_stations.all.input</allStationInputName> <Filters>Fill-Missing</Filters> <pro-dir>daily_project_{!station-id!}_%s</pro-dir> <AFile>daily_project_{!station-id!}.A</AFile>
# (Fragment: the imports and numargv are defined earlier in this script.)
if numargv == 1:
    sys.exit("usage: unavco_ingest_single.py pbo|nucleus")
elif numargv == 2:
    dataset = sys.argv[1]
else:
    sys.exit("Invalid number of parameters!")

# Choose the station list for the requested dataset.
if dataset == 'pbo':
    station_list = pbo_sites()
elif dataset == 'nucleus':
    station_list = nucleus_sites()
else:
    sys.exit("Unrecognized dataset: " + dataset)

#rdahmm_path = "/home/yuma/RDAHMM/Data/"
data_path = properties('data_path')
datadir = data_path + "UNAVCO_" + dataset.upper() + "/"
dbfile = datadir + "UNAVCO_" + dataset.upper() + ".sqlite"
#print datadir, dbfile
if not os.path.exists(datadir):
    cmd = "mkdir -p " + datadir
    os.system(cmd)

#if the same db file exists, drop it
if os.path.isfile(dbfile):
    print "deleting old database " + dbfile
    os.remove(dbfile)

# creating/connecting the database
#!/usr/local/bin/python #========================================================================== # Download, ingest, and execute rdahmm evaluation for updated scripps datasets # Set up a cron job to run nightly # # usage: cron_rdahmm.py # #=========================================================================== import os, subprocess, sys, glob import urllib, string import re from threading import Thread from properties import properties cron_path = properties('cron_path') + "/SCRIPPS_RAW/" download_path = properties('download_path') + "/SCRIPPS_RAW/" #model_path = properties('model_path') scripps_data = properties('cron_path') + "/SCRIPPS_RAW/*.tar" scripps_cmd = properties('script_path') + "/scripps_ingest_single_raw.py" eval_cmd = properties('script_path') + "/rdahmm_eval_single.py" xml_cmd = properties('script_path') + "/create_summary_xmls.py" json_cmd = properties('script_path') + "/create_summary_jsons.py" class ThreadJob(Thread): def __init__(self, tarball): Thread.__init__(self) self.tarball = tarball self.dataset = string.split(tarball, "/")[-1][:-13]
#!/usr/local/bin/python #========================================================================== # Download, ingest, and execute rdahmm evaluation for updated scripps datasets # Set up a cron job to run nightly # # usage: cron_rdahmm.py # #=========================================================================== import os, subprocess, sys, glob import urllib, string import re from threading import Thread from properties import properties cron_path = properties('cron_path') download_path = properties('download_path') + "/WesternNorthAmerica/" #model_path = properties('model_path') scripps_data = properties('cron_path') + "/WesternNorthAmerica/*.tar" scripps_cmd = properties('script_path') + "/scripps_ingest_single.py" scripps_cmd_raw = properties( 'script_path') + "/scripps_ingest_single_wnam_raw.py" eval_cmd = properties('script_path') + "/rdahmm_eval_single.py" xml_cmd = properties('script_path') + "/create_summary_xmls.py" json_cmd = properties('script_path') + "/create_summary_jsons.py" class ThreadJob(Thread): def __init__(self, tarball): Thread.__init__(self) self.tarball = tarball
# -*- coding: utf-8 -*- """ Created on Mon Mar 26 12:45:43 2018 @author: p.tagade """ import sys sys.path.append('./code/') from properties import properties smiles = 'C1=CC=CC=C1' properties = properties(smiles, h**o=None, lumo=None) h**o = properties['h**o'] lumo = properties['lumo'] oxidation = properties['oxidation'] reduction = properties['reduction']
#!/usr/bin/python # Check if everything in data_path is processed import os, sys, string, glob from properties import properties data_path = properties('data_path') model_path = properties('model_path') eval_path = properties('eval_path') for dataset in os.listdir(data_path): if "Strain" in dataset: continue src = data_path + "/" + dataset + "/" for dbfile in glob.glob(src +"/????.sqlite"): stationID = string.split(dbfile, "/")[-1][:-7] model_dest = model_path + "/" + dataset + "/" + "daily_project_" + stationID + "/*.Q" eval_dest = eval_path + "/" + dataset + "/" + "daily_project_" + stationID + "*" #if not os.path.exists(model_dest): if glob.glob(model_dest) == []: print model_dest if glob.glob(eval_dest) == []: print eval_dest
#!/usr/local/bin/python #========================================================================== # Ingest both PBO and NUCLEUS datasets. # Use subprocess to invoke unavco_ingest_single.py for parallel processing # # usage: unavco_ingest.py # #=========================================================================== import os, subprocess, sys from threading import Thread from properties import properties unavco_cmd = properties('script_path') + "/unavco_ingest_single.py" class ThreadJob(Thread): def __init__(self, dataset): Thread.__init__(self) self.dataset = dataset def run(self): cmd = unavco_cmd # start = time.time() print "+++Starting process UNAVCO ", dataset, " ..." p = subprocess.Popen([cmd, self.dataset], stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() # end = time.time() if p.returncode != 0: print p.stderr
#!/usr/local/bin/python #========================================================================== # Execute rdahmm modeling for all ingested scripps datasets. # Use subprocess to invoke multiple rdahmm_model_single.py for # parallel processing # # usage: rdahmm_model.py # #=========================================================================== import os, glob, subprocess, sys from threading import Thread from properties import properties data_path = properties('data_path') model_cmd = properties('script_path') + "/rdahmm_model_single_debug.py" class ThreadJob(Thread): def __init__(self, dataset): Thread.__init__(self) self.dataset = dataset def run(self): cmd = model_cmd # start = time.time() print "+++Starting process ", dataset, " ..." p = subprocess.Popen([cmd, self.dataset], stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() # end = time.time()
#Now convert to a string lastDataDataP1String=lastDataDatePlus1.isoformat() noDataString=formattedToday+"to"+lastDataDataP1String+";"+noDataString xmlFile.write(noDataString) # Clean up and close xmlFile.write("</time-nodata>\n") xmlFile.write("\t\t<nodata-count>"+str(len(noDataString.split(";"))-1)+"</nodata-count>\n") rawFile.close #-------------------------------------------------- # Now run the script #-------------------------------------------------- # Set the properties sites_file_path = properties('download_path')+"/WNAMsites" eval_dir_path = properties('eval_path') #Assign the arrays #stations_array=readStationsList(sites_file_path) #datasets_array=readProjectDirsList(eval_dir_path) # Loop through each data set for dataSet in os.listdir(eval_dir_path): projectDir=eval_dir_path+"/"+dataSet if(os.path.isdir(projectDir)): # Open the XML file that will contain the results outputPath="./junk-" + dataSet + ".xml" xmlFile=open(outputPath,"w"); # Print XML front matter
def getModuleProperties(module):
    """Return the properties dictionary for *module*."""
    return properties.properties(module)
#=========================================================================== import os, sys, string, glob import sqlite3 as db import datetime, csv from properties import properties import zipfile numargv = len(sys.argv) if numargv == 1: sys.exit("usage: rdahmm_eval_single.py scripps_dataset_name") elif numargv == 2: dataset = sys.argv[1] else: sys.exit("Invalid number of parameters!") data_path = properties('data_path') + "/" + dataset + "/" model_path = properties('model_path') + "/" + dataset + "/" eval_path = properties('eval_path') + "/" + dataset + "/" rdahmm_bin = properties('rdahmm_bin') today = datetime.date.today().isoformat() if not os.path.exists(eval_path): cmd = "mkdir -p " + eval_path os.system(cmd) for station in os.listdir(model_path): stationID = station[-4:] # check if the station still exists reffile = model_path + "daily_project_" + stationID + "/daily_project_" + stationID + ".input.ref"
# (Fragment: summary-XML generation; "today" and the imports are defined
# earlier in the original file.)
serverName = "gf9.ucs.indiana.edu"
updateTime = str(today.strftime("%Y-%m-%dT%H:%M:%S"))
beginDate = "1994-01-01"
endDate = str(today.strftime("%Y-%m-%d"))
# endDate = '2016-10-19'
centerLng = "-119.7713889"
centerLat = "36.7477778"
stateChangeNumTxtFile = "stateChangeNums.txt"
stateChangeNumJsInput = "stateChangeNums.txt.jsi"
allStationInputName = "all_stations.all.input"
filters = "Fill_Missing"

# Used to separate parts of the station name
SEPARATOR_CHARACTER = "_"
NO_DATA_TIME_STAMP = "22:22:22"
FINAL_PATH = properties('eval_path')

def setStationId(stationList, stationData):
    """Fill stationData's 'id' and per-station file-name fields, derived
    from the station name embedded in stationList (third "_"-separated
    part). (Chunk truncated mid-statement at the end.)"""
    #Get the station name.
    stationName = stationList.split(SEPARATOR_CHARACTER)[2]
    stationData['id'] = stationName
    stationData['pro_dir'] = "daily_project_" + stationName + "_" + endDate
    stationData['AFile'] = "daily_project_" + stationName + ".A"
    stationData['BFile'] = "daily_project_" + stationName + ".B"
    stationData[
        'InputFile'] = "daily_project_" + stationName + "_" + endDate + ".all.input"
    stationData[
        'RawInputFile'] = "daily_project_" + stationName + "_" + endDate + ".all.raw"
    stationData[
#!/usr/bin/python #========================================================================== # Execute rdahmm modeling for all ingested scripps datasets. # Use subprocess to invoke multiple rdahmm_model_single.py for # parallel processing # # usage: rdahmm_model.py # #=========================================================================== import os, glob, subprocess, sys from threading import Thread from properties import properties data_path = properties('data_path') model_cmd = properties('script_path') + "/rdahmm_model_single.py" class ThreadJob(Thread): def __init__(self, dataset): Thread.__init__(self) self.dataset = dataset def run(self): cmd = model_cmd # start = time.time() print "+++Starting process ", dataset, " ..." p = subprocess.Popen([cmd, self.dataset], stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() # end = time.time() if p.returncode != 0: print p.stderr
# /path/to/rdahmm/stationID.sqlite #=========================================================================== import os, sys, string import sqlite3 as db from properties import properties #numargv = len(sys.argv) #if numargv == 1: # sys.exit("usage: unr_ingest_single.py igs08|fid") #elif numargv == 2: # dataset = sys.argv[1] #else: # sys.exit("Invalid number of parameters!") model_path = properties('model_path') igs_model_path = model_path + "/UNR_IGS08/" fid_model_path = model_path + "/UNR_FID/" data_path = properties('data_path') igs_datadir = data_path + "/UNR_IGS08/" fid_datadir = data_path + "/UNR_FID/" datadir = data_path + "/UNR_SPLICE/" igs_dbfile = igs_datadir + "UNR_IGS08.sqlite" fid_dbfile = fid_datadir + "UNR_FID.sqlite" dbfile = datadir + "UNR_SPLICE.sqlite" #print datadir, dbfile if not os.path.exists(datadir): cmd = "mkdir -p " + datadir
#=========================================================================== import os, sys, string, glob import sqlite3 as db import datetime, csv from properties import properties import subprocess numargv = len(sys.argv) if numargv == 1: sys.exit("usage: rdahmm_model_single.py scripps_dataset_name") elif numargv == 2: dataset = sys.argv[1] else: sys.exit("Invalid number of parameters!") data_path = properties('data_path') + "/" + dataset + "/" model_path = properties('model_path') + "/" + dataset + "/" train_epoch = properties('train_epoch') rdahmm_bin = properties('rdahmm_bin') datasetdb = glob.glob(data_path+dataset+"*.sqlite")[0] datasetconn = db.connect(datasetdb) datasetcur = datasetconn.cursor() if not os.path.exists(model_path): cmd = "mkdir -p " + model_path os.system(cmd) for dbfile in glob.glob(data_path+"/????.sqlite"): stationID = string.split(dbfile, "/")[-1][:-7] #print stationID
#=========================================================================== import os, sys, string, glob import sqlite3 as db import datetime, csv from properties import properties import zipfile numargv = len(sys.argv) if numargv == 1: sys.exit("usage: rdahmm_eval_single.py scripps_dataset_name") elif numargv == 2: dataset = sys.argv[1] else: sys.exit("Invalid number of parameters!") data_path = properties('data_path') + "/" + dataset + "/" model_path = properties('model_path') + "/" + dataset + "/" eval_path = properties('eval_path') + "/" + dataset + "/" rdahmm_bin = properties('rdahmm_bin') today = datetime.date.today().isoformat() if not os.path.exists(eval_path): cmd = "mkdir -p " + eval_path os.system(cmd) for station in os.listdir(model_path): stationID = station[-4:] # station eval directory is named as: daily_project_twhl_2012-05-08 stationDir = eval_path + "daily_project_" + stationID + "_" + today + "/" if not os.path.exists(stationDir):
# (Fragment: model-validation script; mol_properties, smiles and np are
# defined earlier in the original file. "h**o"/"H**o" appear to be
# censorship-garbled forms of "homo"/"Homo" (HOMO orbital energy) and are
# not valid Python identifiers as written.)
red = mol_properties['PAH']['Red']
h**o = mol_properties['PAH']['H**o']
lumo = mol_properties['PAH']['Lumo']
# -----------------------------------------------------------------------------
cases = []
homo_pred = np.zeros(len(smiles))
lumo_pred = np.zeros(len(smiles))
oxidation_pred = np.zeros(len(smiles))
reduction_pred = np.zeros(len(smiles))

# Test case 1: Predicting only from SMILES
for i in range(0, len(smiles)):
    mol = smiles[i]
    prediction = properties(mol)
    lumo_pred[i] = prediction['lumo']
    homo_pred[i] = prediction['h**o']
    oxidation_pred[i] = prediction['oxidation']
    reduction_pred[i] = prediction['reduction']
cases.append([lumo_pred, homo_pred, oxidation_pred, reduction_pred])

# Fresh accumulators for the next case.
homo_pred = np.zeros(len(smiles))
lumo_pred = np.zeros(len(smiles))
oxidation_pred = np.zeros(len(smiles))
reduction_pred = np.zeros(len(smiles))

# Test case 2: SMILES and lumo as input (chunk truncated inside loop)
for i in range(0, len(smiles)):
    mol = smiles[i]
# Reset raw date file readline to 0 allRawInput.seek(0) (iterday,lineindex)=iterateOverDays(iterday,allRawInput,outputTmpList,lineindex) # We have passed the end of data for the station, so fill the remaining days up to today # with NaN lines. This may not be executed. handleStationEndData(iterday, outputTmpList,lineindex) #close and return allRawInput.close() return #-------------------------------------------------- # Below is the actual execution #-------------------------------------------------- eval_dir_path = properties('eval_path') # Loop over each data set for dataSet in os.listdir(eval_dir_path): projectDir=eval_dir_path+"/"+dataSet if(os.path.isdir(projectDir)): projectAllStationsOutputDir=BASE_OUTPUT_DIR+"/"+dataSet # Make the directory to hold the output if necessary if(not os.path.isdir(projectAllStationsOutputDir)): os.makedirs(projectAllStationsOutputDir) # This is a list to store the lines of the file. outputTmpList=[] writeTimeColumn(outputTmpList) # Loop over station directories for stationDir in os.listdir(projectDir): if (os.path.isdir(projectDir+"/"+stationDir)):
#!/usr/bin/python #========================================================================== # Ingest all scripps datasets downloaded into proper directories and databases # Original download directory path is defined in properties. Use subprocess to # invoke multiple scripps_ingest_single.py for parallel processing # # usage: scripps_ingest.py # #=========================================================================== import os, glob, subprocess, sys from threading import Thread from properties import properties #import time #scripps_data = "/home/yuma/RDAHMM/Download/WesternNorthAmerica/*.tar" scripps_data = properties('download_path') + "/WesternNorthAmerica/*.tar" scripps_cmd = properties('script_path') + "/scripps_ingest_single.py" #print scripps_data, scripps_cmd #sys.exit(0) class ThreadJob(Thread): def __init__(self, dataset): Thread.__init__(self) self.dataset = dataset def run(self): #cmd = "/home/yuma/RDAHMM/Scripts/scripps_ingest_single.py" cmd = scripps_cmd # start = time.time() print "+++Starting process ", dataset, " ..."
#!/usr/bin/python #========================================================================== # Download, ingest, and execute rdahmm evaluation for updated scripps datasets # Set up a cron job to run nightly # # usage: cron_rdahmm.py # #=========================================================================== import os, subprocess, sys, glob import urllib,string import re from threading import Thread from properties import properties cron_path = properties('cron_path') download_path = properties('download_path') + "/WesternNorthAmerica/" #model_path = properties('model_path') scripps_data = properties('cron_path') + "/WesternNorthAmerica/*.tar" scripps_cmd = properties('script_path') + "/scripps_ingest_single.py" eval_cmd = properties('script_path') + "/rdahmm_eval_single.py" class ThreadJob(Thread): def __init__(self, tarball): Thread.__init__(self) self.tarball = tarball self.dataset = string.split(tarball, "/")[-1][:-13] def run(self): # ingest a given tar ball
def multiple_scenarios(path_list_scenarios, path_LCA_embodied_energy, path_LCA_embodied_emissions, path_archetypes, path_schedules, path_temporary_folder, gv):
    """
    Run the full analysis pipeline for every scenario listed in an Excel file.

    For each row of the 'scenarios' sheet, the per-scenario flags
    (run_properties, run_demand, run_emissions, run_embodied, run_heatmaps)
    select which pipeline stages execute. All intermediate and final paths
    are built from the scenario's `path_scenario` root using the project's
    fixed Windows-style folder layout (101_input files / 102_intermediate
    output / 103_final output).

    Parameters
    ----------
    path_list_scenarios : str
        Excel workbook whose 'scenarios' sheet lists scenario roots and flags.
    path_LCA_embodied_energy, path_LCA_embodied_emissions : str
        LCA data files forwarded to the embodied-emissions stage.
    path_archetypes : str
        Archetype database forwarded to the properties stage.
    path_schedules : str
        Schedules file forwarded to the demand stage.
    path_temporary_folder : str
        Scratch folder (also rebound to tempfile.gettempdir() for demand).
    gv
        Global-variables object shared by the pipeline stages.

    Returns
    -------
    None; progress is reported via arcpy.AddMessage and stdout.
    """
    paths_of_scenarios = pd.read_excel(path_list_scenarios, sheetname='scenarios')
    number_scenarios = paths_of_scenarios.path_scenario.count()
    for x in range(number_scenarios):
        # Stage 1: generate building properties for the scenario.
        if paths_of_scenarios.run_properties[x] == 1:
            path_buildings = paths_of_scenarios.path_scenario[x] + '\\' + r'101_input files\feature classes' + '\\' + 'buildings.shp'  # noqa
            path_generation = paths_of_scenarios.path_scenario[x] + '\\' + r'101_input files\feature classes' + '\\' + 'generation.shp'  # noqa
            path_results = paths_of_scenarios.path_scenario[x] + '\\' + r'102_intermediate output\building properties'  # noqa
            generate_uses = True
            generate_envelope = True
            generate_systems = True
            generate_equipment = True
            properties.properties(path_archetypes, path_buildings, path_generation, path_results, generate_uses, generate_envelope, generate_systems, generate_equipment, gv)
            message = 'Properties scenario ' + str(x) + ' completed'
            arcpy.AddMessage(message)
        # Stage 2: analytical demand calculation.
        if paths_of_scenarios.run_demand[x] == 1:
            path_radiation = paths_of_scenarios.path_scenario[x] + '\\' + r'102_intermediate output\radiation data' + '\\' + paths_of_scenarios.file_name_radiation[x]  # noqa
            path_weather = paths_of_scenarios.path_scenario[x] + '\\' + r'101_input files\weather data' + '\\' + paths_of_scenarios.file_name_weather[x]  # noqa
            path_results = paths_of_scenarios.path_scenario[x] + '\\' + r'103_final output\demand'  # noqa
            path_properties = paths_of_scenarios.path_scenario[x] + '\\' + r'102_intermediate output\building properties\properties.xls'  # noqa
            # NOTE(review): this rebinds the function parameter to the
            # system temp dir, shadowing the caller-supplied value for the
            # demand stage (the heatmaps stage below still receives the
            # original parameter) -- confirm this asymmetry is intended.
            path_temporary_folder = tempfile.gettempdir()
            demand.analytical(path_radiation, path_schedules, path_temporary_folder, path_weather, path_results, path_properties, gv)
            message = 'Demand scenario ' + str(x) + ' completed'
            arcpy.AddMessage(message)
        # Stage 3: operational LCA emissions.
        if paths_of_scenarios.run_emissions[x] == 1:
            path_results = paths_of_scenarios.path_scenario[x] + '\\' + r'103_final output\emissions'  # noqa
            path_LCA_operation = paths_of_scenarios.path_scenario[x] + '\\' + r'101_input files\LCA data\LCA_operation.xls'  # noqa
            path_properties = paths_of_scenarios.path_scenario[x] + '\\' + r'102_intermediate output\building properties\properties.xls'  # noqa
            path_total_demand = paths_of_scenarios.path_scenario[x] + '\\' + r'103_final output\demand\Total_demand.csv'  # noqa
            emissions.lca_operation(path_total_demand, path_properties, path_LCA_operation, path_results)
            message = 'emissions operation scenario ' + str(x) + ' completed'
            arcpy.AddMessage(message)
        # Stage 4: embodied (gray) emissions; all retrofit options enabled.
        if paths_of_scenarios.run_embodied[x] == 1:
            path_results = paths_of_scenarios.path_scenario[x] + '\\' + r'103_final output\emissions'  # noqa
            path_properties = paths_of_scenarios.path_scenario[x] + '\\' + r'102_intermediate output\building properties\properties.xls'  # noqa
            yearcalc = paths_of_scenarios.year_calc[x]
            retrofit_windows = True
            retrofit_roof = True
            retrofit_walls = True
            retrofit_partitions = True
            retrofit_int_floors = True
            retrofit_installations = True
            retrofit_basement_floor = True
            embodied.lca_embodied(path_LCA_embodied_energy, path_LCA_embodied_emissions, path_properties, path_results, yearcalc, retrofit_windows, retrofit_roof, retrofit_walls, retrofit_partitions, retrofit_int_floors, retrofit_installations, retrofit_basement_floor, gv)
            message = 'gray emissions scenario ' + str(x) + ' completed'
            arcpy.AddMessage(message)
        # Stage 5: heatmap generation from the total-demand CSV.
        if paths_of_scenarios.run_heatmaps[x] == 1:
            analysis_field_variables = ["QHf", "QCf", "Ef"]  # noqa
            path_buildings = paths_of_scenarios.path_scenario[x] + '\\' + r'101_input files\feature classes' + '\\' + 'buildings.shp'  # noqa
            path_variables = paths_of_scenarios.path_scenario[x] + '\\' + r'103_final output\demand'  # noqa
            path_results = paths_of_scenarios.path_scenario[x] + '\\' + r'103_final output\heatmaps'  # noqa
            file_variable = 'Total_demand.csv'
            heatmaps.heatmaps(analysis_field_variables, path_variables, path_buildings, path_results, path_temporary_folder, file_variable)
    # NOTE(review): original indentation is mangled in this chunk; this
    # final message is placed after the scenario loop -- confirm placement.
    print 'finished'
# NOTE(review): this chunk begins mid-way through an argument-count
# if/elif chain; the opening branches (which bind `dataset`) are outside
# this view.
else:
    sys.exit("Invalid number of parameters!")

# Map the dataset name to its UNR download URL components.
if dataset == 'igs08':
    url_prefix = "http://geodesy.unr.edu/gps_timeseries/tenv3/IGS08/"
    url_suffix = ".IGS08.tenv3"
elif dataset == 'fid':
    url_prefix = "http://geodesy.unr.edu/gps_timeseries/rapids/tenv3/FID/"
    url_suffix = ".FID.tenv3"
else:
    sys.exit("Unrecognized dataset: " + dataset)

# Station identifiers to fetch (helper defined elsewhere in this file).
station_list = unr_sites()

#rdahmm_path = "/home/yuma/RDAHMM/Data/"
# Working locations from the shared properties file.
data_path = properties('data_path')
temp_path = properties('temp_path')
model_path = properties('model_path')

# Per-dataset data directory, sqlite database, and scratch directory.
datadir = data_path + "UNR_" + dataset.upper() + "/"
dbfile = datadir + "UNR_" + dataset.upper() + ".sqlite"
workdir = temp_path + "UNR_" + dataset.upper() + "/"
#print datadir, dbfile

#Make a data directory for the data set if needed.
if not os.path.exists(datadir):
    cmd = "mkdir -p " + datadir
    os.system(cmd)
if not os.path.exists(workdir):
    cmd = "mkdir -p " + workdir
    os.system(cmd)
# NOTE(review): the two methods below belong to an enclosing class whose
# header lies outside this chunk (original indentation was mangled).
def get_data_point(self, name, day):
    # Look up one named daily metric for the given day.
    return self.daily[day][name]

def get_interesting(self):
    """Return a (from, to) pair of day keys bracketing the "interesting"
    span of the run.

    `from` is the first day whose total_infected exceeds
    sqrt(self.population) (0 if never reached); `to` is the first later
    day past self.highest_day whose current infected count drops below a
    fifth of that day's total_infected, defaulting to len(self.daily).
    Assumes self.daily iterates its day keys in order -- TODO confirm.
    """
    from_ = None
    to = len(self.daily)
    for d in self.daily.keys():
        ti = self.get_data_point('total_infected', d)
        if from_:
            # Past the peak and active infections have collapsed: stop.
            if d > self.highest_day and self.get_data_point('infected', d) < ti // 5:
                to = d
                break
        elif ti > sqrt(self.population):
            # Epidemic has become significant: start of interesting span.
            from_ = d
    return (from_ or 0, to)

if __name__ == '__main__':
    # Ad-hoc driver: build a world from p1.props and dump ten random
    # people with their cluster details.
    props = properties('p1.props')
    print(props.dump())
    cluster.make_cluster_info(props)
    w = world(props=props)
    for i in range(10):
        p = get_random_member(w.people, lambda p: 1)
        print('\n', str(p), 'city:', str(p.city))
        if p.clusters.values():
            for c in p.clusters.values():
                print(indent(c.show_detail(), size=2))
#=========================================================================== import os, sys, string, glob import sqlite3 as db import datetime, csv from properties import properties import subprocess numargv = len(sys.argv) if numargv == 1: sys.exit("usage: rdahmm_model_single.py scripps_dataset_name") elif numargv == 2: dataset = sys.argv[1] else: sys.exit("Invalid number of parameters!") data_path = properties('data_path') + "/" + dataset + "/" model_path = properties('model_path') + "/" + dataset + "/" train_epoch = properties('train_epoch') rdahmm_bin = properties('rdahmm_bin') datasetdb = glob.glob(data_path + dataset + "*.sqlite")[0] datasetconn = db.connect(datasetdb) datasetcur = datasetconn.cursor() if not os.path.exists(model_path): cmd = "mkdir -p " + model_path os.system(cmd) for dbfile in glob.glob(data_path + "/????.sqlite"): stationID = string.split(dbfile, "/")[-1][:-7] #print stationID
def __init__(self, parent, **kwargs):
    """Build the main tab widget: an "easy" form tab plus the template
    work-area tab, installed as *parent*'s central widget.

    parent -- the application main window; gains easyWidget and
              mainWidget attributes so other components can reach them.
    """
    super().__init__(**kwargs)
    # Store mainWindow to use in slots
    self.mainWindow = parent
    # Set tabWidget properties
    self.setTabsClosable(True)
    self.setMovable(True)
    self.setTabShape(QtWidgets.QTabWidget.Triangular)
    self.tabCloseRequested.connect(self.on_tabCloseRequested)
    parent.setCentralWidget(self)
    # ITRI: we need easier interface (loaded default)
    parent.easyWidget = QtWidgets.QWidget()
    self.gridLayout = QtWidgets.QGridLayout()
    parent.easyWidget.setLayout(self.gridLayout)
    # select part: group box with one button per output format.
    groupboxSelectFormat = QtWidgets.QGroupBox(self.tr("選擇輸出格式 (更改勾選後,再按一次生效)"))
    sp = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred , QtWidgets.QSizePolicy.Preferred)
    sp.setVerticalStretch(5)
    groupboxSelectFormat.setSizePolicy(sp)
    groupboxSelectFormatLayout = QtWidgets.QHBoxLayout()
    groupboxSelectFormat.setLayout(groupboxSelectFormatLayout)
    btn_format1 = QtWidgets.QPushButton()
    btn_format1.pressed.connect(self.on_btn_format1_pressed)
    btn_format1.setText(self.tr("附件二"))
    btn_format2 = QtWidgets.QPushButton()
    btn_format2.pressed.connect(self.on_btn_format2_pressed)
    btn_format2.setText(self.tr("專利清查"))
    btn_format2_2 = QtWidgets.QPushButton()
    btn_format2_2.pressed.connect(self.on_btn_format2_2_pressed)
    btn_format2_2.setText(self.tr("授權資料"))
    btn_format3 = QtWidgets.QPushButton()
    btn_format3.pressed.connect(self.on_btn_format3_pressed)
    btn_format3.setText(self.tr("專利清單"))
    btn_format4 = QtWidgets.QPushButton()
    btn_format4.pressed.connect(self.on_btn_format4_pressed)
    btn_format4.setText(self.tr("計價格式"))
    btn_format5 = QtWidgets.QPushButton()
    btn_format5.pressed.connect(self.on_btn_format5_pressed)
    btn_format5.setText(self.tr("計價表格"))
    # Shared size policy and enlarged bold font for all format buttons.
    sp2 = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred , QtWidgets.QSizePolicy.Preferred)
    sp2.setVerticalStretch(2)
    btn_format1.setSizePolicy(sp2)
    btn_format2.setSizePolicy(sp2)
    btn_format2_2.setSizePolicy(sp2)
    btn_format3.setSizePolicy(sp2)
    btn_format4.setSizePolicy(sp2)
    btn_format5.setSizePolicy(sp2)
    bigFont = QtGui.QFont()
    bigFont.setPointSize(18)
    bigFont.setBold(True)
    btn_format1.setFont(bigFont)
    btn_format2.setFont(bigFont)
    btn_format2_2.setFont(bigFont)
    btn_format3.setFont(bigFont)
    btn_format4.setFont(bigFont)
    btn_format5.setFont(bigFont)
    groupboxSelectFormatLayout.addWidget(btn_format1, QtCore.Qt.AlignCenter)
    groupboxSelectFormatLayout.addWidget(btn_format2, QtCore.Qt.AlignCenter)
    groupboxSelectFormatLayout.addWidget(btn_format2_2, QtCore.Qt.AlignCenter)
    groupboxSelectFormatLayout.addWidget(btn_format3, QtCore.Qt.AlignCenter)
    groupboxSelectFormatLayout.addWidget(btn_format4, QtCore.Qt.AlignCenter)
    groupboxSelectFormatLayout.addWidget(btn_format5, QtCore.Qt.AlignCenter)
    self.gridLayout.addWidget(groupboxSelectFormat, 0, 0)
    # target vals part: checkboxes, free-text edit, confirm/output buttons.
    groupboxTargetVals = QtWidgets.QGroupBox(self.tr("填入目標值"))
    sp3 = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred , QtWidgets.QSizePolicy.Preferred)
    sp3.setVerticalStretch(15)
    groupboxTargetVals.setSizePolicy(sp3)
    groupboxTargetValsLayout = QtWidgets.QVBoxLayout()
    groupboxTargetVals.setLayout(groupboxTargetValsLayout)
    self.checkbox_p40_patentno = QtWidgets.QCheckBox()
    self.checkbox_p40_patentno.setText(self.tr("件編號"))
    self.checkbox_p40_patentno.setChecked(True)
    self.checkbox_p40_patentno.stateChanged.connect(self.on_checkbox_p40_patentno_stateChanged)
    self.checkbox_p40_applypntno = QtWidgets.QCheckBox()
    self.checkbox_p40_applypntno.setText(self.tr("申請案號"))
    self.checkbox_p40_applypntno.stateChanged.connect(self.on_checkbox_p40_applypntno_stateChanged)
    self.targetValsEdit = QtWidgets.QTextEdit()
    okToChangeValsBtn = QtWidgets.QPushButton()
    okToChangeValsBtn.setFixedSize(100, 50)
    okToChangeValsBtn.setText(self.tr("確認"))
    okToChangeValsBtn.pressed.connect(self.on_okToChangeValsBtn_pressed)
    outputBtn = QtWidgets.QPushButton()
    outputBtn.setFixedSize(100, 50)
    outputBtn.setText(self.tr("輸出"))
    outputBtn.pressed.connect(self.on_outputBtn_pressed)
    groupboxTargetValsLayout.addWidget(self.checkbox_p40_patentno, QtCore.Qt.AlignCenter)
    groupboxTargetValsLayout.addWidget(self.checkbox_p40_applypntno, QtCore.Qt.AlignCenter)
    groupboxTargetValsLayout.addWidget(self.targetValsEdit, QtCore.Qt.AlignCenter)
    groupboxTargetValsLayout.addWidget(okToChangeValsBtn)
    groupboxTargetValsLayout.addWidget(outputBtn)
    self.gridLayout.addWidget(groupboxTargetVals, 1, 0)
    self.addTab(parent.easyWidget, self.tr("簡易介面"))
    # Main work space: properties panel + field editor in a splitter.
    parent.mainWidget = QtWidgets.QSplitter()
    # make field of mainWindow to access mainWidget in other places
    propertiesWidget = properties.properties(parent)
    parent.mainWidget.addWidget(propertiesWidget)
    self.fieldWidget = field.field(parent)
    parent.mainWidget.addWidget(self.fieldWidget)
    parent.mainWidget.setSizes([320, 1600])
    self.addTab(parent.mainWidget, self.tr("樣板工作區"))
#!/usr/local/bin/python #========================================================================== # Ingest all scripps datasets downloaded into proper directories and databases # Original download directory path is defined in properties. Use subprocess to # invoke multiple scripps_ingest_single.py for parallel processing # # usage: scripps_ingest.py # #=========================================================================== import os, glob, subprocess, sys from threading import Thread from properties import properties #import time #scripps_data = "/home/yuma/RDAHMM/Download/WesternNorthAmerica/*.tar" scripps_data = properties('download_path') + "/WesternNorthAmerica/*.tar" scripps_cmd = properties('script_path') + "/scripps_ingest_single.py" #print scripps_data, scripps_cmd #sys.exit(0) class ThreadJob(Thread): def __init__(self, dataset): Thread.__init__(self) self.dataset = dataset def run(self): #cmd = "/home/yuma/RDAHMM/Scripts/scripps_ingest_single.py" cmd = scripps_cmd # start = time.time() print "+++Starting process ", dataset, " ..."