def supervised_learning(s, readout, h_fc1, sess, train_step, socket, saver):
    # LOAD NET: saving and loading networks
    directory = 'LogGameFile'
    numOfLogGameFile = len([item for item in os.listdir(directory)
                            if os.path.isfile(os.path.join(directory, item))])
    # NUM_OF_LEARNED_GAME = numOfLogGameFile
    print("START OF TRAINING BY SUPERVISED NETWORK")
    # first state
    for i in range(0, NUM_OF_LEARNED_GAME):
        # Zero-pad the game index to four digits, e.g. "LogGameFile/F0003"
        nameLogFile = directory + "/F" + str(i).zfill(4)
        logFile = LogFile(nameLogFile)
        gameState = logFile.get_all_game_state()
        tranning_network(s, readout, h_fc1, sess, gameState, train_step, socket, saver, i)
    print("DONE SUPERVISED LEARNING")
    return
def __init__(self, logfile, apiWS, urlPopUp, urlFace,
             rootDir="/etc/itd/cloudcam/ID-Reader", accuracy=0.6):
    self.apiWS = apiWS
    self.urlPopUp = urlPopUp
    self.urlFace = urlFace
    self.consoleLog = LogFile(logfile)
    self.rootDir = rootDir
    self.accuracy = accuracy
def GenerateLogFile(self, valueDate, runTime, noLog=False):
    """
    * Return LogFile object and print all exceptions using contents of this object.
    Inputs:
    * valueDate: Expecting a date/datetime object.
    * runTime: Expecting a string.
    """
    # Ensure that value date is a datetime or date:
    noLog = (False if not isinstance(noLog, bool) else noLog)
    if not isinstance(valueDate, (datetime.datetime, datetime.date)):
        raise ValueError('valueDate must be a datetime or date object.')
    # Exit if no log file was specified on command line:
    if not self.HasErrors or noLog:
        return
    ######################
    # Generate Log File, add all exceptions and print:
    ######################
    logFile = None  # initialize so the except clause cannot hit an unbound name
    try:
        logFile = LogFile(valueDate, runTime)
        for exceptionType in self.Contents.keys():
            logFile.Append(self.Contents[exceptionType])
        # Print to stored path:
        logFile.Print()
    except Exception as err:
        # Append exception to stored list if failed to generate the log file:
        self.Add(NonFatals.LogFileFailed(callingFunc='',
                                         logPath=logFile.Path if logFile else '',
                                         specific=str(err)))
def test_file_bohmer(file):
    split_dataset(file + "_data.csv", file + "_labels.csv",
                  file + "_train.csv", file + "_test.csv", 10000)

    train_data = LogFile(file + "_train.csv", ",", 0, 1000000, None,
                         "case_id", "name", convert=False)
    train_data.remove_attributes(["label"])
    model = bohmer.train(train_data, 3, 4, 1)

    test_data = LogFile(file + "_test.csv", ",", 0, 1000000, None,
                        "case_id", "name", convert=False, values=train_data.values)
    bohmer.test(test_data, file + "_output_bohmer.csv", model, "label", 0)

    plot.plot_single_roc_curve(file + "_output_bohmer.csv", file,
                               save_file="../Data/Nolle_Graphs/" + file.split("/")[-1] + "_roc_bohmer.png")
    plot.plot_single_prec_recall_curve(file + "_output_bohmer.csv", file,
                                       save_file="../Data/Nolle_Graphs/" + file.split("/")[-1] + "_precrec_bohmer.png")
def test_file_full(file):
    split_dataset(file + "_data.csv", file + "_labels.csv",
                  file + "_train.csv", file + "_test.csv", None)

    train_data = LogFile(file + "_train.csv", ",", 0, 1000000, None, "case_id", "name")
    train_data.remove_attributes(["label"])
    model = edbn.train(train_data)

    test_data = LogFile(file + "_test.csv", ",", 0, 1000000, None,
                        "case_id", "name", values=train_data.values)
    edbn.test(test_data, file + "_output_full.csv", model, "label", "0", train_data)

    plot.plot_single_roc_curve(file + "_output_full.csv", file,
                               save_file="../Data/Nolle_Graphs/" + file.split("/")[-1] + "_roc.png")
    plot.plot_single_prec_recall_curve(file + "_output_full.csv", file,
                                       save_file="../Data/Nolle_Graphs/" + file.split("/")[-1] + "_precrec.png")
def read_all_module(self):
    # str_array = []
    self.ComPort.write('m\r\n'.encode())  # send command to module
    read_line1 = self.ComPort.readline()
    print(read_line1)
    if len(read_line1) == 0:
        return 1, 'No main board'
    x = read_line1.decode().find("m OK")
    k = 0
    if x >= 0:
        while True:
            read_line2 = self.ComPort.readline()
            if len(read_line2) == 0:
                continue
            # str_array.append(read_line2)
            log_file = LogFile()
            log_file.write_record(read_line2)
            # print(read_line2)
            k = k + 1
            print(k)
            if k == 16:
                break
        return 0, 'OK'
    else:
        return 2, 'ERROR'
def analyze():
    train = LogFile("../Data/bpic2018.csv", ",", 0, None, "startTime", "case",
                    activity_attr=None, integer_input=False, convert=False)
    print("Num of attributes:", len(train.data.columns))
    train.remove_attributes(["eventid", "identity_id", "event_identity_id", "year",
                             "penalty_", "amount_applied", "payment_actual",
                             "penalty_amount", "risk_factor", "cross_compliance",
                             "selected_random", "selected_risk", "selected_manually",
                             "rejected"])
    print("Num of attributes:", len(train.data.columns))
    print(train.data.columns)
    for attr in train.data.columns:
        print(attr, len(train.data[attr].value_counts()))
class Config:
    configFileName = "config.txt"

    def writeConfigFile(self):
        self.log.info('No config file found! Ready to create default config file')
        f = file(self.configFileName, 'w')
        text = [
            '[USER]',
            'name = 阳葵',
            'cpuID = BFEBFBFF000206A7',
            'hddID = 2085256266',
            'macID1 = F0:DE:F1:70:C5:64',
            'macID2 = 8C:A9:82:B7:7C:E6',
            '[URL]',
            'authCodeUrl = /attendance/jcaptcha/jpeg/imageCaptcha',
            'postUrl = /attendance/record/save',
            'host = www.kq.com:8080'
        ]
        for x in text:
            f.write(x + "\n")
        f.close()
        self.log.info('default config file write ok!')

    def __init__(self):
        self.log = LogFile()
        self.log.info('####starting load config.txt####')
        if not os.path.isfile(self.configFileName):
            self.writeConfigFile()
        ConfigParser.RawConfigParser.OPTCRE = re.compile(
            r'(?P<option>[^=\s][^=]*)\s*(?P<vi>[=])\s*(?P<value>.*)$')
        self.CONFIG = ConfigParser.ConfigParser()
        self.CONFIG.read(self.configFileName)
        self.USER = {}
        USERNAME = self.CONFIG.get('USER', 'name').split("|")
        CPUID = self.CONFIG.get('USER', 'cpuID').split("|")
        HDDID = self.CONFIG.get('USER', 'hddID').split("|")
        MACID1 = self.CONFIG.get('USER', 'macID1').split("|")
        MACID2 = self.CONFIG.get('USER', 'macID2').split("|")
        i = 0
        for key in USERNAME:
            userInfo = UserInfo(key, CPUID[i], HDDID[i], MACID1[i], MACID2[i])
            self.USER[key] = userInfo
            i = i + 1
        self.HOST = self.CONFIG.get('URL', 'host')
        self.AUTHCODEURL = 'http://' + self.HOST + self.CONFIG.get('URL', 'authCodeUrl')
        self.POSTURL = 'http://' + self.HOST + self.CONFIG.get('URL', 'postUrl')
        for key, value in self.USER.items():
            msg = ("USERNAME:", key, value.data)  # renamed from 'str' to avoid shadowing the built-in
            self.log.info(msg)
        # print self.USER['阳葵'].data
        self.log.info('authCodeUrl:' + self.AUTHCODEURL)
        self.log.info('postUrl:' + self.POSTURL)
        self.log.info('####end load config.txt####')
def __init__(self, log_file: LogFile):
    self.log_file = log_file
    rows = log_file.get_rows()
    # node_id must be assigned before it is used in the log_id filter below;
    # the original computed log_id first and crashed on the unset attribute.
    self.node_id = max([x.node_id for x in rows], default=0) + 1
    # Continue this node's log sequence, starting at 1 when no rows match.
    self.log_id = max([x.log_id for x in rows if x.node_id == self.node_id], default=1)
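# A self-contained sanity check of the id-assignment logic above, using a stub
# in place of the real LogFile. "Node" is a hypothetical name for the class this
# __init__ belongs to; the row attributes are assumed from the code above.
from types import SimpleNamespace

class _StubLogFile:
    def __init__(self, rows):
        self._rows = rows

    def get_rows(self):
        return self._rows

_rows = [SimpleNamespace(node_id=1, log_id=7), SimpleNamespace(node_id=2, log_id=3)]
# node = Node(_StubLogFile(_rows))
# assert node.node_id == 3  # next free node id after 1 and 2
# assert node.log_id == 1   # a fresh node starts its own log sequence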
def test_loadEmptyFile(self):
    log = LogFile()
    log.createLogfile("empty.test")
    log2 = LogFile()
    log2.loadFromFile("empty.test")
    self.assertTrue(log2.isBrokenFile())
def experiment_department():
    input = LogFile("../Data/bpic2018.csv", ",", 0, None, "startTime", "case", convert=False)
    input.remove_attributes([
        "eventid", "identity_id", "event_identity_id", "year", "penalty_",
        "amount_applied", "payment_actual", "penalty_amount", "risk_factor",
        "cross_compliance", "selected_random", "selected_risk",
        "selected_manually", "rejected"
    ])
    input.convert2int()

    data = input.filter_copy("self.data.department == 1")
    model = cd.create_model(data, data)

    print("Starting writing model to file")
    with open("model_department", "wb") as fout:
        pickle.dump(model, fout)
    print("Done")

    with open("model_department", "rb") as fin:
        model = pickle.load(fin)

    for dept in [1, 2, 3, 4]:
        data = input.filter_copy("self.data.department == " + str(dept))
        scores = cd.get_event_detailed_scores(data, model)
        cd.plot_attribute_graph(scores, model.current_variables)
def main(args):
    parser = createParser()
    args = parser.parse_args(args)

    useDefaultLogFilesPath = not isinstance(args.logfiles, list)
    if not useDefaultLogFilesPath:
        args.logfiles = args.logfiles[0]

    if folderExist(args.logfiles) and containsFiles(args.logfiles):
        for logfile in listLogFilesByFolderPath(args):
            logfile_path = f'{args.logfiles}{logfile}'
            LogFiles.append(LogFile(logfile_path))

    if args.list:
        print("Available logfiles:")
        for logfile in LogFiles:
            print(logfile.path)

    if args.o:
        if folderExist(args.o):
            print("Sanitizing...")
            for i, logfile in enumerate(LogFiles):
                LogFiles[i] = GameStartedTimeStampNormalizer.sanitize(logfile)
                LogFiles[i] = MutationTimeStampNormalizer.sanitize(logfile)
            print(f"Writing files... ({args.o})")
            for logFile in LogFiles:
                CSVWriter.write(logFile, args.o)
                CSVWriter.write_mutation(logFile, args.o)
            CSVWriter.write_mutation_report(LogFiles, args.o)
def read_log_files_from_directory(self, folder_path):
    """Reads all log file names from a folder path into an array and sends the
    log files to the table manager to populate the log file table. At the point
    of reading, if a log file is not a text file, this method sends it to the
    appropriate transcriber.
    """
    # If the folder path is incorrect, the process should not continue
    if not os.path.exists(folder_path):
        print(folder_path, " doesn't exist!")
        return

    # Retrieves the new path name created using the existing path name
    new_path = self.get_temp_path(folder_path)

    # Creates a new directory if it doesn't exist yet
    if not os.path.exists(new_path):
        print("made new folder: ", new_path)
        os.mkdir(new_path)

    for f in os.listdir(folder_path):
        if os.path.isfile(os.path.join(folder_path, f)):
            if not any(x.name == f for x in self.event_session.log_files):
                # Check if it's an audio file
                if ".wav" in f:
                    audio_name = AudioRecognition.audio_transcribe(folder_path, new_path, f)
                    self.event_session.log_files.append(
                        LogFile(audio_name, os.path.join(new_path, audio_name)))
                elif (".png" in f) or (".jpg" in f) or (".jpeg" in f):
                    # If image file, transcribe it with the OCR
                    image_name = ImageFeeder.OCR_transcription(folder_path, new_path, f)
                    self.event_session.log_files.append(
                        LogFile(image_name, os.path.join(new_path, image_name)))
                else:
                    # Copy the file into the hidden directory and append it to the logFile list
                    shutil.copy(os.path.join(folder_path, f), new_path)
                    self.event_session.log_files.append(
                        LogFile(f, new_path + "/" + f))

    # Sends the log file list to the table manager to populate the log file table.
    self.table_manager.populate_log_file_table(self.event_session.log_files)
def test_without_GPSFile(self):
    log = LogFile()
    log.loadFromFile("Random_NoGPS.test")
    time = log.getStartTime()
    end = log.getEndTime()
    fuel = log.getFuelConsumption()
    self.assertTrue(fuel is not None)
    self.assertEqual(time, "01-01-2000;00:00:00")
    self.assertEqual(end, "01-01-2000;00:00:00")
def __init__(self, apikey, userlibrary_id, usercollection_name, workmode):
    self.__log = LogFile('ZoteroSync').log
    self.__zot = zotero.Zotero(userlibrary_id, "user", apikey)
    self.__collID = ZoteroLibs.findCollectionID(self.__zot, usercollection_name)
    self.__modes = workmode
    self.collateMaps()
def main():
    # Get params
    if not len(sys.argv) == 4:
        print "usage: {0} <LOG FILENAME> <MONTH> <DAY>".format(sys.argv[0])
        sys.exit(1)
    else:
        log_filename = sys.argv.pop(1)
        if not os.path.exists(log_filename):
            print "there is no such file"
            sys.exit(1)
        try:
            target_date = datetime.datetime.strptime(
                '{0} {1} {2}'.format(sys.argv.pop(1), sys.argv.pop(1),
                                     datetime.datetime.now().year),
                "%m %d %Y").date()
        except:
            print "wrong date"
            sys.exit(1)

    # Check
    source_filesize = os.path.getsize(log_filename)
    if source_filesize == 0:
        print "empty file"
        sys.exit(1)

    # Searching
    start = time.time()
    with LogFile(log_filename) as lf:
        lf.date = target_date
        if not lf.fast_rewind():
            stop = time.time()
            print "date does not exist in file (time spent: %.1f)" % (stop - start)
            sys.exit(1)
        if lf.back_rewind():
            if lf.forward_rewind():
                # found first occurrence of date
                pass
            else:
                # impossible
                stop = time.time()
                raise Exception("cannot return to date (time spent: %.1f)" % (stop - start))
        else:
            # last read date is the lowest
            pass
        stop = time.time()
        print "found: %s, time spent: %.1f" % (str(lf.logline.datetime), stop - start)

        start = time.time()
        trunc_filename = log_filename + ".trunc"
        copy_logfile_till_end(lf, trunc_filename)
        stop = time.time()
        print "wrote: %s, time spent: %.1f" % (trunc_filename, stop - start)
def _convertOptions(o):
    o['i'] = _str2tags(T.toList(o['i']))
    o['e'] = _str2tags(T.toList(o['e']))
    o['v'] = int(o['v'])
    o['nox'] = ('nox' in o)
    o['dry'] = ('dry' in o)
    o['debug'] = ('debug' in o)
    if o['log']:
        o['log'] = LogFile(o['log'])
    else:
        o['log'] = StdLog()
    o['p'] = T.toList(o['p'])
def learn_and_dump_model():
    train = LogFile("../Data/bpic2018.csv", ",", 0, 30000, "startTime", "case",
                    activity_attr=None, integer_input=False, convert=False)
    train.remove_attributes(["eventid", "identity_id", "event_identity_id", "year",
                             "penalty_", "amount_applied", "payment_actual",
                             "penalty_amount", "risk_factor", "cross_compliance",
                             "selected_random", "selected_risk", "selected_manually",
                             "rejected"])
    train.convert2int()
    model = cd.create_model(train, train)
    with open("model_30000b", "wb") as fout:
        pickle.dump(model, fout)
def __init__(self, localfolder, remotefolder):
    gauth = GoogleAuth()  # Create local webserver and auto handle authentication.
    gauth.LocalWebserverAuth()  # An appropriate settings.yaml must exist
    # gauth.CommandLineAuth()
    self.__log = LogFile('GoogleSync').log
    self.__local = localfolder
    self.__remote = remotefolder
    self.__drive = GoogleDrive(gauth)
    self.__folderID = GoogleCommonLib.getFolderId(self.__drive, remotefolder, True)
    self.sync()
def createLogFile(self, filename, creator, eventType):
    logFile = None
    if ".pdf" in filename:
        logFile = PDFLogFile()
    elif ".mp4" in filename:
        logFile = VideoLogFile()
    elif ".mp3" in filename or ".wav" in filename:
        logFile = AudioLogFile()
    elif ".tiff" in filename or ".PNG" in filename or ".JPG" in filename:
        logFile = ImageLogFile()
    else:
        logFile = LogFile(self.splunkInterface)

    if logFile is not None:
        logFile.creator = creator
        logFile.filename = filename
        logFile.eventType = eventType
        self.addLogFile(logFile)
        return True
    return False
def breast_discrete_exec():
    data = "../Data/breast_data.csv"
    labels = "../Data/breast_labels.csv"

    log = pd.read_csv(data, header=None)
    labels = pd.read_csv(labels, header=None)
    log["Label"] = labels[0]

    # Prefix every column name with "V"; the label column becomes "VLabel"
    cols = []
    for c in log.columns:
        cols.append("V" + str(c))
    log.columns = cols
    log['ID'] = log.reset_index().index
    print(log)

    train = log[:100]
    test = log[100:]
    train = train[train.VLabel == 0].drop(columns=["VLabel"])
    train.to_csv("../Data/breast_train.csv", index=False)
    test.to_csv("../Data/breast_test.csv", index=False)

    train_data = LogFile("../Data/breast_train.csv", ",", 0, 500000, None, "ID",
                         activity_attr="Activity")
    train_data.k = 0
    model = edbn.train(train_data)

    test_data = LogFile("../Data/breast_test.csv", ",", 0, 500000, None, "ID",
                        activity_attr="Activity")
    test_data.k = 0
    print(test_data.data)
    edbn.test(test_data, "../Data/breast_discrete_output.csv", model, "VLabel", "0")

    plot.plot_single_roc_curve("../Data/breast_discrete_output.csv", "breast_discrete")
    plot.plot_single_prec_recall_curve("../Data/breast_discrete_output.csv", "breast_discrete")
def main():
    folium_map = FoliumMap()

    log_file = LogFile()
    if log_file.FileName is None:
        sys.exit()
    folium_map.set_file_name(log_file.FileName)

    nmea_decode = NmeaDecode()
    nmea_decode.set_file_name(log_file.FileName, log_file.FileNameSorted)
    nmea_decode.decode()
    folium_map.add_navigate_data_list(nmea_decode)

    # log_file = LogFile()
    # if log_file.FileName is not None:
    #     nmea_decode = NmeaDecode()
    #     nmea_decode.set_file_name(log_file.FileName, log_file.FileNameSorted)
    #     nmea_decode.decode()
    #     folium_map.add_navigate_data_list(nmea_decode, color="red")
    #
    # log_file = LogFile()
    # if log_file.FileName is not None:
    #     nmea_decode = NmeaDecode()
    #     nmea_decode.set_file_name(log_file.FileName, log_file.FileNameSorted)
    #     nmea_decode.decode()
    #     folium_map.add_navigate_data_list(nmea_decode, color="yellow")

    if len(folium_map.LocationList) > 0:
        folium_map.save()
        webbrowser.open(folium_map.FileName)

    # gmplot_map = GMPlotMap()
    # gmplot_map.set_file_name(log_file.FileName)
    # gmplot_map.set_navigate_data_list(nmea_decode.NavigateDataList)
    # gmplot_map.draw()
    return 0
def main(): """ Main function to execute the datalogging process. When the script gets executed, this function will be executed. """ ### GPIO configuration # BCM numeration for the GPIOs GPIO.setmode(GPIO.BCM) # Configure the GPIOs as outputs GPIO.setup(17, GPIO.OUT) GPIO.setup(27, GPIO.OUT) GPIO.setup(22, GPIO.OUT) # No warnings GPIO.setwarnings(False) # Define variables for the GPIOs representing the color for the RGB RGBblue = 17 RGBred = 27 RGBgreen = 22 # Set the RGB to white light (all colors) GPIO.output(RGBblue, GPIO.LOW) GPIO.output(RGBred, GPIO.LOW) GPIO.output(RGBgreen, GPIO.LOW) GPIO.output(RGBblue, GPIO.HIGH) GPIO.output(RGBred, GPIO.HIGH) GPIO.output(RGBgreen, GPIO.HIGH) ### Create threads (GPS & temperature) # Create an instance of the GpsPoller and start the thread for its polling gpsp = GpsPoller() gpsp.start() # Create an instance of the TempPoller and start the thread for its polling temperature = TempPoller() temperature.start() ### Necessary variables initialization # Line counter. Is necessary to manage the different sample rates of the # different signals i = 0 # No connection yet connection = None NotConnected = True # Error count to detect the moment when ignition is turned off at the end # of a Driving Cycle errorcnt = 0 wait_gps_count = 0 HasConnection = True # Not only GPS logging (yet) --> emergeny mode if no connection to the OBD OnlyGPSMode = 0 # No OBD errors yet OBDError = 0 # Create a CSV file name with the date, time and suffix "test" filename = datetime.datetime.now().strftime( "%y_%m_%d_%H:%M:%S_") + "test.csv" # Get the time how long the system is on start = uptime() ### Set up OBD connection # Try to establish a connection with the OBD dongle while NotConnected: if OBDError % 2 == 1: GPIO.output(RGBgreen, GPIO.HIGH) else: GPIO.output(RGBgreen, GPIO.LOW) try: # Connect to OBD dongle print("creating OBD object") connection = obd.OBD() # Try to connect to OBD dongle print("retrieving OBD status") print(connection.status()) # Print OBD Status for debugging # If the return of query RPM signal is not null # --> Connecting succeeded if (connection.status() == obd.utils.OBDStatus.CAR_CONNECTED and (connection.query(obd.commands.RPM).is_null() == False)): NotConnected = False print("Successful connected to OBDII!" 
) # Connecting to OBD dongle succeeded time.sleep(1) # Connection not successful: Sleep 1s before trying to connect to OBD dongle again else: time.sleep(1) # Cannot connect to the OBD: wait, add an OBD error and try again except Exception as e: print("Exception : ", e) print("Error Connecting to OBD-Adapter (" + str(OBDError) + ")") time.sleep(1) OBDError += 1 # If could not connect to the OBD for the tenth time, use the only GPS # mode to log only the GPS and temperature signal if OBDError == 5: NotConnected = False OnlyGPSMode = 1 print("running in OnlyGPSMode now") ### pid01_1, pid01_2, pid01_3, pid01_4, pid01_5, pid01_6, pid09 = obd_find_signals( ) with open('setup.json') as json_file: setup = json.load(json_file) obd_signals = setup("obd_signals") # concatenate signals, format to binary available_signals = {} for key in obd_signals.keys(): available_signals[key] = check_signal( obd_signals.get(key), pid01_1 + pid01_2 + pid01_3 + pid01_4 + pid01_5 + pid01_6 + pid09) ### Creation LogFile object and variables # Create an object of LogFile log = LogFile() # Reset the OBD errors to 0 OBDError = 0 # Mode has been stated, need to start only GPS mode or normal mode temp = True # Prefix and suffix for the logfile that will be created, to difference # between a file for the normal or the only GPS mode stri = "" stri_end = ".keep" ### Handling onlyGpsMode # Handle only GPS Mode: check if GPS data available (until connection works) while temp and OnlyGPSMode == 1 and wait_gps_count <= 20: if wait_gps_count % 2 == 1: GPIO.output(RGBred, GPIO.LOW) else: GPIO.output(RGBred, GPIO.HIGH) # Get the current value from the GPS report = gpsp.get_current_value() wait_gps_count += 1 print("Report: ", report) ### Check for GPS connection # If the JSON objecthas the right class, there is a GPS connection if report['class'] == 'TPV': # If the longitude, latitude and altitude are existing, the # connection was successful if hasattr(report, 'lon') and hasattr(report, 'lat') and hasattr( report, 'alt'): print("GPS found-> Only GPS Mode") # Set Colour to Cyan GPIO.output(RGBblue, GPIO.LOW) GPIO.output(RGBred, GPIO.LOW) GPIO.output(RGBgreen, GPIO.LOW) GPIO.output(RGBblue, GPIO.HIGH) # Set the prefix and suffix to mark the CSV file for only GPS stri = "GPS_" stri_end = "x" # Set that the only GPS mode can be executed OnlyGPSMode = 2 temp = False # Not able to connect: Wait a second and try again else: time.sleep(1) ### Creation logfile # Create a logfile log.createLogfile(stri + filename + stri_end) # No VIN yet vin = 12345 # Get the time how long the system is on start = uptime() ### Perform datalogging try: ### Execute onlyGpsMode while OnlyGPSMode == 2: # Line counter for control of sample rates i = i + 1 # If the counter is too big (--> StopIteration): reset if i == 2048: i = 0 # Execute the function for the only GPS mode GPS_Only(log, i, start, temperature, gpsp) ### Execute normal mode ### Set up normal mode # If OBD is successfully connected: Set the normal mode up if connection is not None and connection.status( ) == obd.utils.OBDStatus.CAR_CONNECTED and HasConnection: print("Trying to get RPM") if available_signals.get("RPM") == 1: response = connection.query(obd.commands.RPM) print("RPM query response: ", response) else: print("RPM is not an available signal on this vehicle") # Trying to get Vehicle Identification Number if the signal is not available assign 1234 print("Trying to get VIN") if available_signals["VIN"] == 1: c = OBDCommand("VIN", "Get Vehicle Identification Number", b"0902", 20, raw_string, 
ECU.ENGINE, False) response = connection.query(c, force=True) vin = LogFile.parseVIN(response.value) print("VIN: ", vin) else: print("Vin is not an available signal on this vehicle") vin = 1234 # connection.close() # Change to asynchronous connection: last value always immediatly # retrievable connection = obd.Async() # Keep track of the RPM (constantly get its value) connection.watch(obd.commands.RPM) # Keep track of every defined OBD signal for signal in signals.getOBDSignalList(): print("watching ", signal.name) connection.watch(obd.commands[signal.name]) # Start the update loop of the OBD values connection.start() # Wait a moment time.sleep(0.5) # Set RGB colour to pink GPIO.output(RGBblue, GPIO.LOW) GPIO.output(RGBred, GPIO.LOW) GPIO.output(RGBgreen, GPIO.LOW) GPIO.output(RGBblue, GPIO.HIGH) GPIO.output(RGBred, GPIO.HIGH) ### Perform normal mode # Normal Mode: OBD-, GPS-, Temperature-Data while connection is not None and connection.status( ) == obd.utils.OBDStatus.CAR_CONNECTED and HasConnection: ### Ignition off? # Error handling to detect IGNITION OFF Signal (RPM is 0 then) if available_signals["RPM"] == 1: if connection.query(obd.commands.RPM).is_null() is True: print("Error") errorcnt += 1 print(errorcnt) # If RPM is not 0, reset the errors (just a disruption) else: errorcnt = 0 # If the fifth error occured,most likely the ignition is off if errorcnt >= 5: print("End: Too many Errors - Ignition seems to be off") # No connection anymore HasConnection = False # Turn off the RGB GPIO.output(RGBblue, GPIO.LOW) GPIO.output(RGBred, GPIO.LOW) GPIO.output(RGBgreen, GPIO.LOW) ### Signal recording # Increment the line counter for the control of the sample rates i = i + 1 # If counter is too big, reset becuase of StopIteration if i == 2048: i = 0 # Get actual time data # timestr = str(datetime.datetime.now()) # Get the time how long the system is on timestr = uptime() # Calculate the time since the start of measurement timestr = timestr - start # Create a list to for the signal values result = [timestr] # Append the calculated time # Set the GPS and Temperature variables to initial values (for the # case that no value is recorded) lon = None lat = None gpsTime = None internalTemp = None alt = None # Get GPS data (if possible) if i % signals.getSignal("GPS_Long").sampleRate == 0: report = gpsp.get_current_value() (lon, lat, alt, gpsTime) = getGpsData(report) # Get internal tempterature data if i % signals.getSignal("INTERNAL_AIR_TEMP").sampleRate == 0: internalTemp = temperature.get_current_value() # Get OBD data for every defined OBD signal for signal in signals.getOBDSignalList(): # Handle the different sample times (with counter i) if i % signal.sampleRate == 0: r = connection.query(obd.commands[signal.name]) # If the response is null, append a 0 as value, else the # returned value if r.is_null(): result.append(0) else: result.append(r.value.magnitude) # If no sample for this time, append None else: result.append(None) # Append GPS-Data (if available) result.append(lon) result.append(lat) result.append(alt) result.append(gpsTime) # Append Temperature-Data (if available) result.append(internalTemp) result.append(vin) ### Recorded data to buffer # Append the list of values to the buffer (dictionary of the # signals with a list of its values as value) log.addData(result) # Write the VIN only once --> does not change and reduce data amount if vin is not None: vin = None # Wait a moment to limit the data amount time.sleep(0.5) ### Buffer to file # Every 20 rows of measurement: append the buffer 
data to the CSV file if i % 20 == 0: log.appendFile() print("Appending File ...") ### Ignition is off # Append the buffer data to the CSV file log.appendFile() print("Ignition Off") print("\nKilling Threads..") ### End threads # End the GPS polling thread gpsp.running = False gpsp.join() # End the temperature polling thread temperature.running = False temperature.join() ### Disconnect OBD # Stop the connection to the OBD connection.stop() ### Configure GPIOs # Turn off the RGB GPIO.output(RGBblue, GPIO.LOW) GPIO.output(RGBred, GPIO.LOW) GPIO.output(RGBgreen, GPIO.LOW) # Reset the GPIO status GPIO.cleanup() ### Error occured except (KeyboardInterrupt, SystemExit): ### Configuration GPIOs # Turn of the RGB GPIO.output(RGBblue, GPIO.LOW) GPIO.output(RGBred, GPIO.LOW) GPIO.output(RGBgreen, GPIO.LOW) # Reset the GPIO status GPIO.cleanup() print("Excpetion:") print("\nKilling Threads..") ### Buffer to file # Append the buffer data to the CSV file log.appendFile() ### End threads # End the GPS polling thread gpsp.running = False gpsp.join() # End the temperature polling thread temperature.running = False temperature.join() ### Disconnect OBD # Stop the connection to the OBD connection.stop()
rootDir = "/etc/itd/cloudcam/ID-Reader" logfile = rootDir + "/consolelogs.log" def init(): try: fileConfig = open(wsHelper.rootDir + "/config.xml", "r") for line in fileConfig: if re.search("logfile", line): logfile = line.split("=")[1].strip() break except FileNotFoundError as e: print ("config.xml find not found") except IndexError as e: print ("Format Error in config.xml") else: fileConfig.close() if __name__ == '__main__': wsHelper = WebServiceHelper(logfile, "","", "", rootDir) #default values logfile logfile = wsHelper.rootDir + '/consolelogs.log' init() consoleLog = LogFile(logfile) if len(sys.argv) >= 2: qrcode = sys.argv[1] wsHelper.saveQRCodeToFile(qrcode) consoleLog.showLog (qrcode) else: consoleLog.showLog ('Please make sure arguments are correctly: %QR% \n') sys.exit()
def open_log(fn, *params, **args):
    global _LogFile
    _LogFile = LogFile(fn, *params, **args)
def testParseMethodReturnsZeroline_countOnNoMatches(self):
    test_instance = LogFile("./test_file_one.txt", r"\w{3} \d{1,2}", "dummy date format")
    test_instance.parse()
    assert test_instance.line_count == 0
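# For context: a minimal LogFile sketch that would satisfy the test above. The
# constructor signature is taken from the test; treating parse() as "count the
# lines matching the pattern" is an assumption, not the project's actual code.
import re

class LogFile:
    def __init__(self, path, line_pattern, date_format):
        self.path = path
        self.line_pattern = re.compile(line_pattern)
        self.date_format = date_format  # accepted but unused in this sketch
        self.line_count = 0

    def parse(self):
        # Count only the lines that match the configured pattern.
        with open(self.path) as f:
            self.line_count = sum(1 for line in f if self.line_pattern.search(line))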
from LogFile import LogFile
from DelimFile import DelimFile

log = LogFile("log.txt")
myDelim = DelimFile("data.csv", ",")

log.write("This is a log message")
log.write("This is another log message")

myDelim.write(['a', 'b', 'c', 'd'])
myDelim.write(['1', '2', '3', '4'])
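# The demo above imports LogFile and DelimFile without showing them; a minimal
# self-contained sketch matching that usage follows. These are assumptions
# inferred from the calls, not the original classes.
class LogFile:
    def __init__(self, path):
        self.path = path

    def write(self, message):
        # Append one message per line.
        with open(self.path, "a") as f:
            f.write(message + "\n")

class DelimFile:
    def __init__(self, path, delim):
        self.path = path
        self.delim = delim

    def write(self, fields):
        # Join the fields with the configured delimiter and append as one row.
        with open(self.path, "a") as f:
            f.write(self.delim.join(fields) + "\n")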
def open_debug(fn, *params, **args):
    global _DebugFile
    _DebugFile = LogFile(fn, *params, **args)
class CNNPlayer(Player):
    def __init__(self, agent_filepath=""):
        Player.__init__(self)
        # Create the experience memory database
        if not os.path.exists(REPLAY_MEMORY_FILENAME):
            self.replay_memory = ReplayMemory()
        else:
            self.replay_memory = cPickle.load(open(REPLAY_MEMORY_FILENAME, 'r'))
        # Initialize the convolutional neural network
        self.network = MinecraftNet(agent_filepath)
        self.ae_network = FeatureNet()
        # Probability of selecting non-random action
        self.epsilon = STARTING_EPSILON
        # The total number of frames this agent has been trained on
        # through all the minibatch training
        self.frames_trained = 0
        # Load old epsilon and frames learned values
        self.load()
        self.cnn_action_map = self.initActionMap()
        # The current and previous sequences of game frames and actions
        self.current_seq = None
        self.previous_seq = None
        self.previous_action = None
        # Event logging
        self.log = LogFile("run.log", True)
        #self.log.logMessage("INITIAL NETWORK PARAMS: %s" % str(self.network.solver.net.params['ip1'][0].data[...]))

    # Create a map of all the CNN's legal actions
    # We will be able to pick the best move from this list based on the CNN's output
    def initActionMap(self):
        actions = []
        # Populate with all 18 legal actions
        # (break_block, updown_rot, leftright_rot, forwardback, leftright)
        for break_block in (False, True):
            for leftright_rot in (0.0, AGENT_ROTATION_SPEED, -AGENT_ROTATION_SPEED):
                for forwardback in (0, 1, -1):
                    actions.append(Action.Action(break_block, updown_rot=0.0,
                                                 leftright_rot=leftright_rot,
                                                 forwardback=forwardback, leftright=0))
        return actions

    def getActionMapIndex(self, action):
        for i in range(len(self.cnn_action_map)):
            if action == self.cnn_action_map[i]:
                return i
        self.log.logError("ACTION %s NOT FOUND IN ACTION MAP" % str(action))
        sys.exit(1)

    def sequenceForward(self, seq):
        cnn_input = seq.toCNNInput()
        output = self.network.forward(cnn_input)
        return output

    def pickBestAction(self, seq):
        cnn_outputs = self.sequenceForward(seq)
        self.log.logMessage("REINFORCEMENT NET OUTPUT: " + str(cnn_outputs))
        max_output_index = 0
        max_output = cnn_outputs[0]
        for i in range(len(cnn_outputs)):
            if cnn_outputs[i] > max_output:
                max_output = cnn_outputs[i]
                max_output_index = i
        self.log.logMessage("BEST ACTION CHOSEN: %s" % str(self.cnn_action_map[max_output_index]))
        return self.cnn_action_map[max_output_index]

    def pickRandomAction(self):
        return random.choice(self.cnn_action_map)

    def load(self):
        if os.path.exists(CNNPLAYER_SAVE_FILENAME):
            f = open(CNNPLAYER_SAVE_FILENAME, 'r')
            tokens = f.read().split()
            self.epsilon, self.frames_trained = float(tokens[0]), int(tokens[1])
            f.close()

    def save(self):
        # Save the replay memory as a pickled file
        o = open(REPLAY_MEMORY_FILENAME, 'w')
        cPickle.dump(self.replay_memory, o)
        o.close()
        o = open(CNNPLAYER_SAVE_FILENAME, 'w')
        o.write("%.8f %d" % (self.epsilon, self.frames_trained))
        o.close()
        # Log the last network weights
        #self.log.logMessage("FINAL NETWORK PARAMS: %s" % str(self.network.solver.net.params['ip1'][0].data[...]))

    # Train the agent's CNN on a minibatch of Experiences
    def trainMinibatch(self):
        self.log.logMessage("TRAINING MINIBATCH")
        self.frames_trained += TRAINING_BATCH_SIZE
        experiences = self.replay_memory.get_random(TRAINING_BATCH_SIZE)
        inputs = []
        labels = []
        for experience in experiences:
            cnn_outputs = self.sequenceForward(experience.curr_seq)
            #best_action = self.pickBestAction(experience.curr_seq)
            target_vector = []
            for act in cnn_outputs:
                #act = cnn_outputs[act_id]
                act_target = experience.curr_reward + GAMMA * act
                target_vector.append(act_target)
            #target = experience.curr_reward + GAMMA * best_action_output
            inputs.append(experience.prev_seq)
            labels.append(target_vector)
            #dataset.append((experience.prev_seq, target))
        # Do gradient descent to minimize (target - network.forward(experience.prev_seq)) ^ 2
        # print("INPUTS:", inputs)
        # print("LABELS:", labels)
        #self.network.set_input_data(inputs, labels)
        self.network.set_train_input_data(inputs, labels)
        self.network.train(BATCH_TRAINING_ITERATIONS)  # train for a single iteration

    # Receive the agent's reward from its previous Action along with
    # a Frame screenshot of the current game state
    def getDecision(self, current_frame):
        self.log.logMessage("DECISION #%d in GAME FRAME #%d" % (self.actions_performed, self.game.world_counter))
        self.log.logMessage("TRAINED ON %d FRAMES" % (self.frames_trained))
        features = self.ae_network.encodeNumpyArray(current_frame.pixels)
        #self.log.logMessage("Current frame yields features: %s" % str(features))
        if self.previous_reward != 0:
            self.log.logMessage("GOT REWARD: %d" % self.previous_reward)
            self.total_score += self.previous_reward
        # First frame of game
        if self.actions_performed == 0:
            self.actions_performed += 1
            self.previous_seq = Sequence(features)
            # print("FRAME SEQUENCE: {0}".format(self.previous_seq))
            curr_action = self.pickRandomAction()
            self.previous_seq = self.previous_seq.createNewSequence(curr_action)
            self.previous_action = curr_action
            # print("FIRST SEQUENCE: {0}".format(self.previous_seq))
            return
        # Should I make a random move?
        r = random.random()
        # Add on the current frame to the current sequence
        self.current_seq = self.previous_seq.createNewSequence(features)
        if r > self.epsilon or self.actions_performed < 4:  # not self.current_seq.isFull()
            curr_action = self.pickRandomAction()
        else:
            # Run the CNN and pick the max output action
            curr_action = self.pickBestAction(self.current_seq)
        # Finally, add the chosen action to the current sequence
        self.current_seq = self.current_seq.createNewSequence(curr_action)
        # Actually perform the action in the game
        self.performAction(curr_action)
        new_experience = Experience(self.previous_seq, self.previous_action,
                                    self.previous_reward, self.current_seq)
        self.replay_memory.store(new_experience)
        self.previous_seq = self.current_seq
        if (self.game.world_counter > STARTING_FRAMES
                and self.game.world_counter % BATCH_TRAINING_FREQUENCY == 0):
            self.trainMinibatch()
        # Remember the chosen Action since it will be required for the next iteration
        self.previous_action = curr_action
        if self.epsilon < MAX_EPSILON:
            self.epsilon *= EPSILON_UPDATE
            self.log.logMessage("UPDATED EPSILON: %.5f" % self.epsilon)
#!/usr/local/bin/python3.4
# This is the executable for this project. When run, it triggers a parse of the
# current records, and continues that process until killed.

from LogFile import LogFile
from DNSLogDB import DNSLogDB
from RadiusDB import RadiusDB
from Config import config
from time import sleep

if __name__ == '__main__':
    dnslogdb = DNSLogDB(config['databases']['dnslog'])
    radiusdb = RadiusDB(config['databases']['radius'])
    #dnslogfile = LogFile(config['inputs']['logFile'], dnslogdb, radiusdb)
    dnslogfile = LogFile('/data/ko/rsyslog/dnslog.log.backlog', dnslogdb, radiusdb)
    dnslogfile.digestFucked()
    sleep(1)
    print('You\'re finally done with this shitstorm. Now stop trusting people '
          'to not restart things without telling you.')
class AnalyzerGTK:

    def __init__(self):
        # Set the Glade file
        self.gladefile = "analyzer.glade"
        self.wTree = gtk.glade.XML(self.gladefile)

        # Get the Main Window and connect the "destroy" event
        self.window = self.wTree.get_widget("MainWindow")
        if (self.window):
            self.window.connect("destroy", gtk.main_quit)

        # Hook up the signals
        dic = {
            "on_menuOpen_activate": self.menuOpen_activate,
            "on_menuQuit_activate": self.menuQuit_activate,
            "on_menuAbout_activate": self.menuAbout_activate,
            "on_MainWindow_destroy": gtk.main_quit,
            "on_NetworkList_row_activated": self.plotRows,
            "on_btnPlotAnalysis_clicked": self.plotAnalysis,
            "on_checkShowTemplate_clicked": self.showTemplate,
            "on_btnClearFilter_clicked": self.clearFilter,
            "on_tbClear_clicked": self.clearPlot
        }
        self.wTree.signal_autoconnect(dic)

        # Create a LogFile object to handle data
        self.logfile = LogFile()

        # Create an empty plot window for tuning curves
        self.figureDT = Figure(figsize=(6, 4), dpi=72)
        self.axisDT = self.figureDT.add_subplot(111)
        self.axisDT.set_xlabel('Duration')
        self.axisDT.set_ylabel('Mean Number of Spikes')
        self.axisDT.grid(True)
        self.canvasDT = FigureCanvasGTK(self.figureDT)  # a gtk.DrawingArea
        self.canvasDT.show()
        self.graphviewDT = self.wTree.get_widget("vboxTuning")
        self.graphviewDT.pack_end(self.canvasDT)
        self.maxSpikes = 1
        self.showTemplate = False

        # Create an empty plot window for analysis
        self.figureAN = Figure(dpi=72)
        self.axisAN = self.figureAN.add_subplot(111)
        self.canvasAN = FigureCanvasGTK(self.figureAN)
        self.canvasAN.show()
        self.graphviewAN = self.wTree.get_widget("vboxAnalysis")
        self.graphviewAN.pack_end(self.canvasAN)

        # Setup the analyze window
        self.cbXAxis = gtk.combo_box_new_text()
        self.cbYAxis = gtk.combo_box_new_text()
        self.cbZAxis = gtk.combo_box_new_text()
        self.cbXAxis.show()
        self.cbYAxis.show()
        self.cbZAxis.show()
        self.hboxAnalyze = self.wTree.get_widget("hboxAnalyze")
        labelX = gtk.Label("X-Axis")
        labelX.show()
        labelY = gtk.Label("Y-Axis")
        labelY.show()
        labelZ = gtk.Label("Z-Axis")
        labelZ.show()
        self.hboxAnalyze.pack_start(labelX)
        self.hboxAnalyze.pack_start(self.cbXAxis)
        self.hboxAnalyze.pack_start(labelY)
        self.hboxAnalyze.pack_start(self.cbYAxis)
        self.hboxAnalyze.pack_start(labelZ)
        self.hboxAnalyze.pack_start(self.cbZAxis)

    def plotAnalysis(self, widget):
        Xvar = self.cbXAxis.get_active_text()
        Yvar = self.cbYAxis.get_active_text()
        Zvar = self.cbZAxis.get_active_text()
        if Xvar == None or Yvar == None or Zvar == None:
            return
        if Zvar == "None":
            XvarIndex = self.logfile.params().index(Xvar) + 1
            YvarIndex = self.logfile.params().index(Yvar) + 1
            rowiter = self.treemodelsorted.get_iter_first()
            values = defaultdict(list)
            while rowiter != None:
                X = self.treemodelsorted.get_value(rowiter, XvarIndex)
                Y = self.treemodelsorted.get_value(rowiter, YvarIndex)
                values[float(X)].append(float(Y))
                rowiter = self.treemodelsorted.iter_next(rowiter)
            X = []
            Y = []
            for k in sorted(values.keys()):
                X.append(k)
                Y.append(mean(values[k]))
            self.axisAN.cla()
            self.figureAN.clf()
            self.axisAN = self.figureAN.add_subplot(111)
            self.axisAN.plot(X, Y, 'k', linewidth=4)
            self.axisAN.set_xlabel(Xvar)
            self.axisAN.set_ylabel(Yvar)
            self.canvasAN.draw()
        else:
            XvarIndex = self.logfile.params().index(Xvar) + 1
            YvarIndex = self.logfile.params().index(Yvar) + 1
            ZvarIndex = self.logfile.params().index(Zvar) + 1
            rowiter = self.treemodelsorted.get_iter_first()
            values = {}
            Ykeys = []
            while rowiter != None:
                X = self.treemodelsorted.get_value(rowiter, XvarIndex)
                Y = self.treemodelsorted.get_value(rowiter, YvarIndex)
                Z = self.treemodelsorted.get_value(rowiter, ZvarIndex)
                Ykeys.append(Y)
                values.setdefault(X, defaultdict(list))[Y].append(Z)
                rowiter = self.treemodelsorted.iter_next(rowiter)
            Ykeys = unique(Ykeys)
            XY = []
            for k in sorted(values.keys()):
                tmp = []
                for k2 in sorted(Ykeys):
                    if values[k].has_key(k2):
                        tmp.append(mean(values[k][k2]))
                    else:
                        tmp.append(0)
                XY.append(tmp)
            Z = array(XY)
            self.axisAN.cla()
            self.figureAN.clf()
            self.axisAN = self.figureAN.add_subplot(111)
            im = NonUniformImage(self.axisAN, interpolation='nearest',
                                 extent=(min(values.keys()), max(values.keys()),
                                         min(Ykeys), max(Ykeys)))
            im.set_data(values.keys(), Ykeys, Z.transpose())
            self.axisAN.images.append(im)
            self.axisAN.set_xlim(min(values.keys()), max(values.keys()))
            self.axisAN.set_ylim(min(Ykeys), max(Ykeys))
            self.axisAN.set_xlabel(Xvar)
            self.axisAN.set_ylabel(Yvar)
            self.axisAN.set_title(Zvar)
            self.figureAN.colorbar(im)
            self.canvasAN.draw()

    def showTemplate(self, widget):
        self.showTemplate = widget.get_active()

    def plotRows(self, widget, path, column):
        (model, pathlist) = self.treeview.get_selection().get_selected_rows()
        for p in pathlist:
            treeiter = self.treemodelsorted.get_iter(p)
            X = self.logfile.getdurs()
            Y = self.logfile.getresults(self.treemodelsorted.get_value(treeiter, 0))
            tmp = Y[:]
            tmp.append(self.maxSpikes)
            self.maxSpikes = max(tmp)
            self.axisDT.plot(X, Y, linewidth=2)
            self.axisDT.set_ylim((0, self.maxSpikes + 0.1))
        if self.showTemplate:
            Y = self.logfile.getresults(0)
            tmp = Y[:]
            tmp.append(self.maxSpikes)
            self.maxSpikes = max(tmp)
            self.axisDT.plot(X, Y, 'k', linewidth=3)
            self.axisDT.set_ylim((0, self.maxSpikes + 0.1))
        self.canvasDT.draw()

    def clearPlot(self, widget):
        self.maxSpikes = 1
        self.axisDT.cla()
        self.axisDT.set_xlabel('Duration')
        self.axisDT.set_ylabel('Mean Number of Spikes')
        self.axisDT.grid(True)
        self.canvasDT.draw()

    def clearFilter(self, widget):
        for i in range(len(self.filtercboxes)):
            self.filtercboxes[i].set_active(0)

    def applyFilter(self, model, iter):
        show = True
        for i in range(len(self.filtercboxes)):
            f = self.filtercboxes[i].get_active_text()
            show = show and (f == "All" or f == str(self.liststore.get_value(iter, i + 1)))
        return show

    def updateFilter(self, widget):
        self.treemodelfilter = self.liststore.filter_new(root=None)
        self.treemodelfilter.set_visible_func(self.applyFilter)
        self.treemodelsorted = gtk.TreeModelSort(self.treemodelfilter)
        self.treeview.set_model(self.treemodelsorted)

    def menuOpen_activate(self, widget):
        chooser = gtk.FileChooserDialog(
            title="Open Log File",
            action=gtk.FILE_CHOOSER_ACTION_OPEN,
            buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                     gtk.STOCK_OPEN, gtk.RESPONSE_OK))
        response = chooser.run()
        if response == gtk.RESPONSE_OK:
            filename = chooser.get_filename()
            if self.logfile.open(filename):
                # Data file opened
                # Populate the ListStore for our table
                self.liststore = gtk.ListStore(int, *([float] * len(self.logfile.params())))
                self.treeview = self.wTree.get_widget("NetworkList")
                self.treeview.set_model(self.liststore)
                self.treeview.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
                # Remove any old columns
                for c in self.treeview.get_columns():
                    self.treeview.remove_column(c)
                self.cbXAxis.get_model().clear()
                self.cbYAxis.get_model().clear()
                self.cbZAxis.get_model().clear()
                self.cbZAxis.append_text("None")
                # Add columns to filter table and fill analysis dropdowns
                self.filtertable = self.wTree.get_widget("FilterTable")
                self.filtertable.resize(len(self.logfile.params()), 2)
                textrenderer = gtk.CellRendererText()
                col = 1
                self.filtercboxes = []
                for p in self.logfile.params():
                    column = gtk.TreeViewColumn(p, textrenderer, text=col)
                    column.set_sizing(gtk.TREE_VIEW_COLUMN_AUTOSIZE)
                    column.set_resizable(True)
                    column.set_clickable(True)
                    column.set_sort_column_id(col)
                    self.treeview.append_column(column)
                    label = gtk.Label(p)
                    label.set_justify(gtk.JUSTIFY_RIGHT)
                    label.show()
                    cbox = gtk.combo_box_new_text()
                    self.filtercboxes.append(cbox)
                    self.filtercboxes[-1].show()
                    self.filtercboxes[-1].connect("changed", self.updateFilter)
                    self.filtertable.attach(label, 0, 1, col - 1, col)
                    self.filtertable.attach(cbox, 1, 2, col - 1, col)
                    self.cbXAxis.append_text(p)
                    self.cbYAxis.append_text(p)
                    self.cbZAxis.append_text(p)
                    col = col + 1
                # Add data to table
                for n in self.logfile.networkdefs():
                    itt = self.liststore.append(n)
                # Add data to columns
                tmp = []
                for i in range(len(self.logfile.params())):
                    tmp.append([])
                for n in self.logfile.networkdefs():
                    for i in range(1, len(self.logfile.params()) + 1):
                        tmp[i - 1].append(n[i])
                for i in range(len(tmp)):
                    tmp[i] = unique(tmp[i])
                    tmp[i].sort()
                    self.filtercboxes[i].append_text("All")
                    self.filtercboxes[i].set_active(0)
                    for n in tmp[i]:
                        self.filtercboxes[i].append_text(str(n))
                # Setup the filtered sorted liststores
                self.treemodelfilter = self.liststore.filter_new(root=None)
                self.treemodelfilter.set_visible_func(self.applyFilter)
                self.treemodelsorted = gtk.TreeModelSort(self.treemodelfilter)
                self.treeview.set_model(self.treemodelsorted)
        # Destroy the file chooser
        chooser.destroy()

    def menuAbout_activate(self, widget):
        aboutDialog = self.wTree.get_widget("AboutDialog")
        response = aboutDialog.run()
        if response == gtk.RESPONSE_CANCEL:
            aboutDialog.hide()

    def menuQuit_activate(self, widget):
        gtk.main_quit()
class CNNPlayer(Player):

    def __init__(self, agent_filepath=""):
        Player.__init__(self)
        # Create the experience memory database
        if not os.path.exists(REPLAY_MEMORY_FILENAME):
            self.replay_memory = ReplayMemory()
        else:
            self.replay_memory = cPickle.load(open(REPLAY_MEMORY_FILENAME, 'rb'))
        # Initialize the convolutional neural network
        self.network = MinecraftNet(agent_filepath)
        self.ae_network = FeatureNet()
        # Probability of selecting a non-random (greedy) action;
        # annealed upward during training
        self.epsilon = STARTING_EPSILON
        # The total number of frames this agent has been trained on
        # through all the minibatch training
        self.frames_trained = 0
        # Load old epsilon and frames-trained values
        self.load()
        self.cnn_action_map = self.initActionMap()
        # The current and previous sequences of game frames and actions
        self.current_seq = None
        self.previous_seq = None
        self.previous_action = None
        # Event logging
        self.log = LogFile("run.log", True)

    # Create a map of all the CNN's legal actions.
    # The best move is picked from this list based on the CNN's output.
    def initActionMap(self):
        actions = []
        # Populate with all 18 legal actions, enumerated as
        # (break_block, updown_rot, leftright_rot, forwardback, leftright):
        # for each break_block setting, sweep the rotation (none, +speed,
        # -speed) and the forward/backward movement (none, forward, backward).
        for break_block in (False, True):
            for rot in (0.0, AGENT_ROTATION_SPEED, -AGENT_ROTATION_SPEED):
                for forwardback in (0, 1, -1):
                    actions.append(Action.Action(break_block,
                                                 updown_rot=0.0,
                                                 leftright_rot=rot,
                                                 forwardback=forwardback,
                                                 leftright=0))
        return actions

    def getActionMapIndex(self, action):
        for i in range(len(self.cnn_action_map)):
            if action == self.cnn_action_map[i]:
                return i
        self.log.logError("ACTION %s NOT FOUND IN ACTION MAP" % str(action))
        sys.exit(1)

    def sequenceForward(self, seq):
        cnn_input = seq.toCNNInput()
        output = self.network.forward(cnn_input)
        return output

    # Run the network forward and return the action with the highest output
    def pickBestAction(self, seq):
        cnn_outputs = self.sequenceForward(seq)
        self.log.logMessage("REINFORCEMENT NET OUTPUT: " + str(cnn_outputs))
        max_output_index = 0
        max_output = cnn_outputs[0]
        for i in range(len(cnn_outputs)):
            if cnn_outputs[i] > max_output:
                max_output = cnn_outputs[i]
                max_output_index = i
        self.log.logMessage("BEST ACTION CHOSEN: %s" %
                            str(self.cnn_action_map[max_output_index]))
        return self.cnn_action_map[max_output_index]

    def pickRandomAction(self):
        return random.choice(self.cnn_action_map)

    def load(self):
        if os.path.exists(CNNPLAYER_SAVE_FILENAME):
            f = open(CNNPLAYER_SAVE_FILENAME, 'r')
            tokens = f.read().split()
            self.epsilon, self.frames_trained = float(tokens[0]), int(tokens[1])
            f.close()

    def save(self):
        # Save the replay memory as a pickled file
        o = open(REPLAY_MEMORY_FILENAME, 'wb')
        cPickle.dump(self.replay_memory, o)
        o.close()
        # Save epsilon and the training-frame counter
        o = open(CNNPLAYER_SAVE_FILENAME, 'w')
        o.write("%.8f %d" % (self.epsilon, self.frames_trained))
        o.close()

    # Train the agent's CNN on a minibatch of Experiences
    def trainMinibatch(self):
        self.log.logMessage("TRAINING MINIBATCH")
        self.frames_trained += TRAINING_BATCH_SIZE
        experiences = self.replay_memory.get_random(TRAINING_BATCH_SIZE)
        inputs = []
        labels = []
        for experience in experiences:
            cnn_outputs = self.sequenceForward(experience.curr_seq)
            # Build the target vector: every action output is regressed
            # toward reward + GAMMA * Q(next_seq, action)
            target_vector = []
            for act in cnn_outputs:
                act_target = experience.curr_reward + GAMMA * act
                target_vector.append(act_target)
            inputs.append(experience.prev_seq)
            labels.append(target_vector)
        # Gradient descent minimizes
        # (target - network.forward(experience.prev_seq)) ** 2
        self.network.set_train_input_data(inputs, labels)
        self.network.train(BATCH_TRAINING_ITERATIONS)

    # Receive the agent's reward from its previous Action along with
    # a Frame screenshot of the current game state
    def getDecision(self, current_frame):
        self.log.logMessage("DECISION #%d in GAME FRAME #%d" %
                            (self.actions_performed, self.game.world_counter))
        self.log.logMessage("TRAINED ON %d FRAMES" % self.frames_trained)
        features = self.ae_network.encodeNumpyArray(current_frame.pixels)

        if self.previous_reward != 0:
            self.log.logMessage("GOT REWARD: %d" % self.previous_reward)
            self.total_score += self.previous_reward

        # First frame of the game: no previous sequence exists yet,
        # so pick a random action and initialize the sequence
        if self.actions_performed == 0:
            self.actions_performed += 1
            self.previous_seq = Sequence(features)
            curr_action = self.pickRandomAction()
            self.previous_seq = self.previous_seq.createNewSequence(curr_action)
            self.previous_action = curr_action
            return

        # Should I make a random move?
        r = random.random()
        # Append the current frame to the current sequence
        self.current_seq = self.previous_seq.createNewSequence(features)
        # Act randomly while exploring, and always during the first few
        # decisions (before the sequence holds enough frames)
        if r > self.epsilon or self.actions_performed < 4:
            curr_action = self.pickRandomAction()
        else:
            # Run the CNN and pick the max-output action
            curr_action = self.pickBestAction(self.current_seq)

        # Finally, add the chosen action to the current sequence
        self.current_seq = self.current_seq.createNewSequence(curr_action)
        # Actually perform the action in the game
        self.performAction(curr_action)

        new_experience = Experience(self.previous_seq, self.previous_action,
                                    self.previous_reward, self.current_seq)
        self.replay_memory.store(new_experience)
        self.previous_seq = self.current_seq

        if self.game.world_counter > STARTING_FRAMES and \
           self.game.world_counter % BATCH_TRAINING_FREQUENCY == 0:
            self.trainMinibatch()

        # Remember the chosen Action; it is needed for the next iteration
        self.previous_action = curr_action

        # Anneal epsilon toward MAX_EPSILON so the agent gradually favors
        # the network's choices over random exploration
        if self.epsilon < MAX_EPSILON:
            self.epsilon *= EPSILON_UPDATE
            self.log.logMessage("UPDATED EPSILON: %.5f" % self.epsilon)
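# Note: trainMinibatch() above regresses *every* output toward
# reward + GAMMA * Q(s', a), bootstrapping each action with its own
# next-state value. The conventional DQN target instead updates only the
# action actually taken, using the max over next-state Q-values, and leaves
# the other outputs at their current predictions. A minimal standalone
# sketch of that rule for comparison; `dqn_target`, `prev_q`, and `next_q`
# are illustrative names, not part of this project.

def dqn_target(prev_q, action_index, reward, next_q, gamma=0.95, terminal=False):
    """Label vector for one experience: only the taken action gets a new target."""
    target = list(prev_q)                        # keep current predictions
    bootstrap = 0.0 if terminal else gamma * max(next_q)
    target[action_index] = reward + bootstrap    # r + gamma * max_a' Q(s', a')
    return target

# Example: 4 actions, action 2 was taken and earned reward 1.0
print(dqn_target([0.1, 0.4, 0.2, 0.0], 2, 1.0, [0.3, 0.1, 0.0, 0.2]))
# -> [0.1, 0.4, 1.285, 0.0]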
def __init__(self):
    # Set the Glade file
    self.gladefile = "analyzer.glade"
    self.wTree = gtk.glade.XML(self.gladefile)

    # Get the main window and connect the "destroy" event
    self.window = self.wTree.get_widget("MainWindow")
    if self.window:
        self.window.connect("destroy", gtk.main_quit)

    # Hook up the signals
    dic = {
        "on_menuOpen_activate": self.menuOpen_activate,
        "on_menuQuit_activate": self.menuQuit_activate,
        "on_menuAbout_activate": self.menuAbout_activate,
        "on_MainWindow_destroy": gtk.main_quit,
        "on_NetworkList_row_activated": self.plotRows,
        "on_btnPlotAnalysis_clicked": self.plotAnalysis,
        "on_checkShowTemplate_clicked": self.showTemplate,
        "on_btnClearFilter_clicked": self.clearFilter,
        "on_tbClear_clicked": self.clearPlot,
    }
    self.wTree.signal_autoconnect(dic)

    # Create a LogFile object to handle data
    self.logfile = LogFile()

    # Create an empty plot window for tuning curves
    self.figureDT = Figure(figsize=(6, 4), dpi=72)
    self.axisDT = self.figureDT.add_subplot(111)
    self.axisDT.set_xlabel('Duration')
    self.axisDT.set_ylabel('Mean Number of Spikes')
    self.axisDT.grid(True)
    self.canvasDT = FigureCanvasGTK(self.figureDT)  # a gtk.DrawingArea
    self.canvasDT.show()
    self.graphviewDT = self.wTree.get_widget("vboxTuning")
    self.graphviewDT.pack_end(self.canvasDT)
    self.maxSpikes = 1
    self.showTemplate = False

    # Create an empty plot window for analysis
    self.figureAN = Figure(dpi=72)
    self.axisAN = self.figureAN.add_subplot(111)
    self.canvasAN = FigureCanvasGTK(self.figureAN)
    self.canvasAN.show()
    self.graphviewAN = self.wTree.get_widget("vboxAnalysis")
    self.graphviewAN.pack_end(self.canvasAN)

    # Set up the axis selectors of the analysis window
    self.cbXAxis = gtk.combo_box_new_text()
    self.cbYAxis = gtk.combo_box_new_text()
    self.cbZAxis = gtk.combo_box_new_text()
    self.cbXAxis.show()
    self.cbYAxis.show()
    self.cbZAxis.show()
    self.hboxAnalyze = self.wTree.get_widget("hboxAnalyze")
    labelX = gtk.Label("X-Axis")
    labelX.show()
    labelY = gtk.Label("Y-Axis")
    labelY.show()
    labelZ = gtk.Label("Z-Axis")
    labelZ.show()
    self.hboxAnalyze.pack_start(labelX)
    self.hboxAnalyze.pack_start(self.cbXAxis)
    self.hboxAnalyze.pack_start(labelY)
    self.hboxAnalyze.pack_start(self.cbYAxis)
    self.hboxAnalyze.pack_start(labelZ)
    self.hboxAnalyze.pack_start(self.cbZAxis)
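# The handlers wired up in `dic` above are defined elsewhere in this class.
# For orientation only, a minimal sketch of what two of them might look
# like; these bodies are assumptions, not the project's actual code.
def menuQuit_activate(self, widget):
    # File -> Quit behaves like closing the main window
    gtk.main_quit()

def clearPlot(self, widget):
    # Wipe the analysis axes and redraw the now-empty canvas
    self.axisAN.clear()
    self.canvasAN.draw()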
class ChainSeparator:
    """
    Open PDB file; give back one chain whenever next() is called.
    This class is used by the pdb2xplor script.

    This class constitutes vintage code. See L{Biskit.PDBCleaner} and
    L{Biskit.Mod.TemplateCleaner} for a more recent implementation of
    PDB cleaning.

    @todo: The removal of duplicate chains should be transferred to the
           PDBCleaner so that this class can be retired.
    """

    def __init__(self, fname, outPath='', chainIdOffset=0,
                 capBreaks=0, chainMask=0, log=None):
        """
        @param fname: pdb filename
        @type  fname: str
        @param outPath: path for log file
        @type  outPath: str
        @param chainIdOffset: start chain numbering at this offset
        @type  chainIdOffset: int
        @param capBreaks: add ACE and NME to N- and C-term. of chain breaks [0]
        @type  capBreaks: 0|1
        @param chainMask: chain mask for overriding the default sequence
                          identity check [None]
        @type  chainMask: [1|0]
        @param log: LogFile object
        @type  log: object
        """
        self.pdb = Structure(fname)
        self.fname = fname
        self.outPath = T.absfile(outPath)
        self.chainIdOffset = chainIdOffset
        self.capBreaks = capBreaks
        self.log = LogFile(T.absfile(outPath) + '/' + self.pdbname() + '.log')
        if log:
            self.log = log

        self.chains = self.pdb.peptide_chains
        self.counter = -1
        ## sequence identity threshold between multiple copies in the PDB
        self.threshold = 0.9

        self._expressionCheck(
            "[^\n].*[Hh][Oo][Mm][Oo].?[Dd][Ii][Mm][eE][Rr].*\n", 'HOMODIMER')
        self._expressionCheck("[^\n].*[Tt][Rr][Ii][Mm][Ee][Rr].*\n", 'TRIMER')
        self._hetatomCheck()

        self.log.add("Separate chains: \n------------------")
        self._removeDuplicateChains(chainMask)  # keep only one copy of molecule
        self._separateChainBreaks()
        self._assign_seg_ids()  # new segment id for each chain

    def pdbname(self):
        """
        Extract pdb code from file name.

        @return: (assumed) pdb code
        @rtype: str
        """
        return T.stripFilename(self.pdb.filename)

    def _expressionCheck(self, findExpression, findClean):
        """
        Check and report whether the regular expression 'findExpression'
        matches in the PDB-file. Use this to locate data in the REMARK
        section of a pdb file. Prints a warning to stdout if the regular
        expression is found.

        @param findExpression: regular expression
        @type  findExpression: str
        @param findClean: clean name of regular expression
        @type  findClean: str
        """
        pdb = open(self.fname, 'r')
        pdbFile = pdb.read()
        searchResult = re.findall(findExpression, pdbFile)

        warningMessage = """
WARNING! The text string '%s' was found in the PDB-file.
If this PDB-file contains a homodimer, one of the chains will be
deleted by this script. To avoid this, prepare the file for Xplor
manually.\n""" % findClean

        warningMessage2 = """--------------------------------------------\n"""

        if len(searchResult) != 0:
            self.log.add(warningMessage)
            self.log.add("String found in line(s): \n")
            for i in range(0, len(searchResult)):
                self.log.add(searchResult[i])
            self.log.add(warningMessage2)
        pdb.close()

    def _hetatomCheck(self):
        """
        Check and report if there are any non-water HETATMs in the PDB-file.
        """
        pdb = open(self.fname, 'r')
        pdbFile = pdb.read()
        findExpression = "HETATM.*\n"
        searchResult = re.findall(findExpression, pdbFile)
        ## keep only true, non-water HETATM records
        searchResult = [line for line in searchResult
                        if line[0:6] == "HETATM" and line[17:20] != "HOH"]

        warningMessage = """
WARNING! The PDB-file contains coordinates for non-water HETATMs.
If you want to keep the HETATM, prepare the file for Xplor manually.\n"""
        warningMessage2 = "\n" + 80 * "-" + "\n"

        if len(searchResult) != 0:
            self.log.add(warningMessage)
            self.log.add("String found in line(s): \n")
            for i in range(0, len(searchResult)):
                self.log.add(searchResult[i][0:-1])
            self.log.add(warningMessage2)
        pdb.close()

    def _compareSequences(self, seq1, seq2):
        """
        @param seq1: sequence 1 to compare
        @type  seq1: str
        @param seq2: sequence 2 to compare
        @type  seq2: str

        @return: identity (0.0 - 1.0) between the two sequences
        @rtype:  float
        """
        ## compare the 2 sequences with difflib's SequenceMatcher
        matcher = SequenceMatcher(None, ''.join(seq1), ''.join(seq2))
        return matcher.ratio()

    def _removeDuplicateChains(self, chainMask=None):
        """
        Get rid of identical chains by comparing all chains pairwise.

        @param chainMask: chain mask for overriding the chain identity
                          checking (default: None)
        @type  chainMask: [int]

        @return: number of chains removed
        @rtype: int
        """
        chainCount = len(self.chains)
        matrix = 1.0 * N.zeros((chainCount, chainCount))
        chain_ids = []

        ## create identity matrix for all chains against all chains
        for i in range(0, chainCount):
            chain_ids = chain_ids + [self.chains[i].chain_id]  # collect for log file
            for j in range(i, len(self.chains)):
                ## convert 3-letter-code res list into 1-letter-code string
                seq1 = singleAA(self.chains[i].sequence())
                seq2 = singleAA(self.chains[j].sequence())
                matrix[i, j] = self._compareSequences(seq1, seq2)

        ## report activity
        self.log.add("\n  Chain ID's of compared chains: " + str(chain_ids))
        self.log.add("  Cross-Identity between chains:\n" + str(matrix))
        self.log.add("  Identity threshold used: " + str(self.threshold))

        ## override the automatic chain deletion by supplying a
        ## chain mask to this function
        if chainMask:
            if len(chainMask) == chainCount:
                self.chains = N.compress(chainMask, self.chains)
                self.log.add("NOTE: chain mask %s used for removing chains.\n"
                             % chainMask)
            else:
                self.log.add("########## ERROR ###############")
                self.log.add("# Chain mask is only %i chains long" % len(chainMask))
                self.log.add("# when a mask of length %i is needed" % chainCount)
                self.log.add("# No cleaning will be performed.\n")

        if not chainMask:
            ## look at diagonals in the "identity matrix"
            ## (each chain against each)
            duplicate = len(self.chains)
            for offset in range(1, chainCount):
                diag = N.diagonal(matrix, offset, 0, 1)
                ## a diagonal of 1's marks the begin of the duplicates
                avg = 1.0 * N.sum(diag) / len(diag)
                if avg >= self.threshold:
                    duplicate = offset
                    break
            self.chains = self.chains[:duplicate]
            self.log.add("NOTE: Identity matrix will be used for removing "
                         "identical chains.")

        ## report how many chains have been removed
        self.log.add(str(chainCount - len(self.chains)) +
                     " chains have been removed.\n")

        return chainCount - len(self.chains)

    def _assign_seg_ids(self):
        """
        Assign a new segment id to each chain.
        """
        counter = self.chainIdOffset
        for chain in self.chains:
            ## assemble segid from pdb code + one letter out of A to Z
            chain.segment_id = self.pdbname()[:3] + string.uppercase[counter]
            counter = counter + 1
            try:  # report changed segment ids
                chain_id = chain.chain_id
                self.log.add("changed segment ID of chain " + chain_id +
                             " to " + chain.segment_id)
            except:
                T.errWriteln("_assign_seg_ids(): logerror")

    def _sequentialDist(self, chain, cutoff, atom):
        """
        Calculate sequential atom-atom distances and report residues that
        are further apart than the cutoff (chain break positions).

        @param chain: Scientific.IO.PDB.PeptideChain object
        @type  chain: object
        @param cutoff: threshold for reporting a gap (chain break)
        @type  cutoff: float
        @param atom: type of atoms to check (i.e. 'CA')
        @type  atom: str

        @return: list of chain break positions (residue index of the first
                 residue of each pair that is too distant)
        @rtype: list of int
        """
        distanceList = []
        v0 = Vector(0, 0, 0)
        jump = 1

        for res in range(0, len(chain) - 2):
            try:
                v1 = Vector(chain[res][atom].position.array)

                ## ignore CA with 0,0,0 coordinate
                if v1 != v0:
                    jump = 1
                    v2 = Vector(chain[res + jump][atom].position.array)

                    ## look for the next CA with a non-zero coordinate
                    while v2 == v0 and jump + res < len(chain):
                        jump += 1
                        v2 = Vector(chain[res + jump][atom].position.array)

                    if (v1 - v2).length() > cutoff * jump:
                        distanceList = distanceList + [res + jump - 1]

            except:
                self.log.add(
                    "_sequentialDist():\nError while checking CA-CA distance" +
                    " between residues " + str(chain[res].name) +
                    str(chain[res].number) + " and " +
                    str(chain[res + jump].name) +
                    str(chain[res + jump].number) +
                    " in chain " + chain.chain_id)
                self.log.add("Error: " + T.lastError())

        return distanceList

    def _separateChainBreaks(self):
        """
        Separate chains with breaks into 2 chains. The new chain(s)
        is/are added to the internal PDB instance (self.chains).
        """
        fragments = []

        for chain in self.chains:
            ## residue numbers of the residues just before each break
            breaks = self._sequentialDist(chain, 4.5, 'CA')
            self.log.add(str(len(breaks)) + " breaks found in chain " +
                         "(" + str(len(chain)) + " residues) " +
                         chain.chain_id + ": " + str(breaks))

            previous = 0
            ncap_next = 0

            for breakRes in breaks:
                residues = chain.residues[previous:breakRes + 1]
                previous = breakRes + 1

                chainNew = PeptideChain(residues, chain.chain_id,
                                        chain.segment_id)
                if ncap_next:
                    self.__nCap(chainNew)
                    ncap_next = 0

                if self.capBreaks:
                    ## add N-methylamide to the C-terminal of the break
                    self.__cCap(chainNew)
                    ncap_next = 1

                fragments = fragments + [chainNew]

            chainNew = PeptideChain(chain.residues[previous:], chain.chain_id,
                                    chain.segment_id)
            if ncap_next:
                self.__nCap(chainNew)

            fragments = fragments + [chainNew]

        self.chains = fragments

    def __nCap(self, pep_chain):
        """
        Add acetyl capping to the N-terminal of a peptide chain.
        """
        n = (pep_chain[0].number or 1) - 1
        r = AminoAcidResidue('ACE', number=n,
                             atoms=[Atom('CA', Vector(0, 0, 0), element='C')])
        pep_chain.residues = [r] + pep_chain.residues
        self.log.add('Capping chain break with ACE %i' % n)

    def __cCap(self, pep_chain):
        """
        Add methylamine capping to the C-terminal of a peptide chain.
        """
        n = (pep_chain[-1].number or len(pep_chain)) + 1
        r = AminoAcidResidue('NME', number=n,
                             atoms=[Atom('CA', Vector(0, 0, 0), element='C')])
        pep_chain.residues = pep_chain.residues + [r]
        self.log.add('Capping chain break with NME %i' % n)

    def extractWaters(self):
        """
        Write waters into a separate pdb file, called |pdbCode|_waters.pdb.
        """
        try:
            fTarget = self.outPath + '/' + self.pdbname()[:4] + '_waters.pdb'
            pdb = PDBFile(fTarget, mode='w')

            waters = []
            for key in ['HOH', 'DOD']:
                if self.pdb.molecules.has_key(key):
                    waters += self.pdb.molecules[key]

            pdb.nextChain(chain_id='', segment_id='1XWW')
            for w in waters:
                pdb.nextResidue('TIP3')
                ## XPLOR wants "ATOM" not "HETATM":
                pdb.het_flag = 0
                pdb.writeAtom('OH2', w.atoms['O'].position)

            ## keep TIP3 waters as well
            if len(waters) == 0:
                try:
                    TIP3_waters = self.pdb.molecules['TIP3']
                except:
                    TIP3_waters = []
                for w in TIP3_waters:
                    pdb.nextResidue('TIP3')
                    ## XPLOR wants "ATOM" not "HETATM":
                    pdb.het_flag = 0
                    pdb.writeAtom('OH2', w.atoms['OH2'].position)
                    pdb.writeAtom('H1', w.atoms['H1'].position)
                    pdb.writeAtom('H2', w.atoms['H2'].position)
            pdb.close()

        except:
            T.errWriteln("Error writing waters to %s: " % fTarget)
            T.errWriteln(T.lastError())

    def next(self):
        """
        Return the next 'clean', non-redundant, non-broken chain from the PDB.

        @return: Scientific.IO.PDB.PeptideChain, the completed chain,
                 OR None if no chain is left
        @rtype: chain object OR None
        """
        self.counter = self.counter + 1
        if len(self.chains) > self.counter:
            return self.chains[self.counter]
        return None
#!/usr/local/bin/python3.4
# This is the executable for this project. When run, it triggers a parse of
# the current records, and continues that process until killed.

from LogFile import LogFile
from DNSLogDB import DNSLogDB
from RadiusDB import RadiusDB
from Config import config
from time import sleep
import cProfile
import pstats
import io

if __name__ == '__main__':
    pr = cProfile.Profile()
    pr.enable()

    dnslogdb = DNSLogDB(config['databases']['dnslog'])
    radiusdb = RadiusDB(config['databases']['radius'])
    dnslogfile = LogFile(config['inputs']['logFile'], dnslogdb, radiusdb)
    dnslogfile.digestFile()

    pr.disable()
    s = io.StringIO()
    ps = pstats.Stats(pr, stream=s).sort_stats('cumulative')
    ps.print_stats()
    print(s.getvalue())
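# The full cumulative listing printed above can be long; pstats accepts
# restrictions on print_stats(), such as a maximum number of entries or a
# regex on the function path. A self-contained sketch of that pattern,
# profiling a trivial stand-in function instead of digestFile():
import cProfile
import io
import pstats

def _demo():
    return sum(range(100000))

_pr = cProfile.Profile()
_pr.enable()
_demo()
_pr.disable()

_s = io.StringIO()
_ps = pstats.Stats(_pr, stream=_s).sort_stats('cumulative')
_ps.print_stats(20)        # only the 20 most expensive entries
_ps.print_stats('demo')    # only entries whose path matches 'demo'
print(_s.getvalue())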