def main():
    """
    Application to be run each 5 minutes: it gathers information about a
    set of events, compresses the files and sends them as a commit
    operation.

    Exits with status 1 when no .pladata directory is found in SVN,
    status 0 otherwise.
    """
    # Abort early if recording is not permitted — NOTE(review): exact
    # semantics of validate_recording() are defined elsewhere; confirm.
    validate_recording()

    pla.logMessage("placlient: plaDirectory = " + pla.plaDirectory)

    # Fetch the svn client and install the authentication callbacks
    svnClient = pysvn.Client()
    svnClient.exception_style = 1
    svnClient.callback_get_login = get_login
    svnClient.callback_ssl_server_trust_prompt = ssl_server_trust_prompt

    # See if there is a .pladata file, if not, terminate
    svnPLADirectory = pla.findPLASVNDataDir(svnClient,
                                            os.path.expanduser("~/.plaworkspace"))
    # Idiom fix: identity test with "is None" instead of "== None".
    if svnPLADirectory is None:
        pla.logMessage("placlient: No dir .pladata in SVN")
        sys.exit(1)

    # First, we send the data of the user through a commit.
    # Afterwards, we obtain the last changes in their updates repository.
    svnSendEvents(svnClient, svnPLADirectory)
    svnUpdateFiles(svnPLADirectory)

    # And terminate gracefully with the status generated by the original SVN
    sys.exit(0)
def prepareDataFile(suffix): """ Function that prepares a data file with the information of the last logins. It checks first if it is allowed to send this data, if the file is there and is not empty. If so, creates a duplicate adding the suffix and returns that file name. """ global dataDir global dataFile global logPrefix # Log the execution of this function pla.logMessage(logPrefix + ': prepare ' + dataFile) # If no file is present in pladirectory, nothing to return if not os.path.exists(dataDir): pla.logMessage(logPrefix + ': Disabled. Skipping') return [] # Prepare the file to catch the output of the last command toSendFileName = dataFile + '_' + suffix dataOut = open(toSendFileName, 'w') # Execute the last command and store its output try: command = ['/usr/bin/ĺast', '-F'] pla.logMessage(logPrefix + ': executing ' + ' '.join(command)) givenCmd = subprocess.Popen(command, executable = '/usr/bin/last', \ stdout = dataOut) except OSError, e: print 'File not found (PLA)' return []
def prepareDataFile(suffix):
    """
    Prepare a data file with the recent Firefox browsing history.  This
    script should be executed with each svn commit.

    Copies the profile's places.sqlite to a temporary file (avoids lock
    contention with a running browser) and builds a query over the visits
    recorded since the last execution timestamp.

    NOTE(review): this chunk ends mid-function; the query results are
    written to the suffixed file in the continuation.
    """
    global dataDir
    global dataFile
    global _tmpFile

    pla.logMessage('firefox: prepare ' + dataFile)

    # If no file is present in pladirectory, no instrumentation
    if not os.path.exists(dataDir):
        pla.logMessage('firefox: Disabled. Skipping')
        return []

    # Copy the Firefox SQLite database to the tmp directory, in order to
    # avoid lock issues.
    ffoxDir = os.path.expanduser('~/.mozilla/firefox/')

    # Parse the ffox configuration to locate the first profile directory.
    # NOTE(review): assumes a 'Profile0' section exists in profiles.ini.
    config = ConfigParser.ConfigParser()
    config.read(os.path.join(ffoxDir, 'profiles.ini'))
    profileDir = os.path.join(ffoxDir, config.get('Profile0', 'Path'))
    sqliteFile = os.path.join(profileDir, 'places.sqlite')
    pla.logMessage('firefox: copy file ' + sqliteFile + ' to ' + _tmpFile)
    shutil.copyfile(sqliteFile, _tmpFile)

    # Get the timestamp for the last execution
    lastExecution = pla.getLastExecutionTStamp()
    pla.logMessage('Last execution: ' + str(lastExecution))
    date_clause = ''
    if lastExecution != None:
        # visit_date is stored in microseconds (the query below divides by
        # 1000000.0), so scale the last-execution timestamp accordingly.
        date_clause = "AND visit_date > " + str(int(lastExecution * 1000000))

    # Get the last activity from Firefox, through a query to the
    # history table
    conn = sqlite3.connect(_tmpFile)
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    query = """
    SELECT url, DATETIME(CAST (visit_date/1000000.0 AS INTEGER),
                         'unixepoch', 'localtime') AS timestamp
    FROM moz_historyvisits h, moz_places p
    WHERE h.place_id = p.id """ + date_clause + """
    ORDER BY visit_date
    """
    pla.logMessage('firefox: Query = ' + query)

    # Create a duplicate of the data file with the suffix
    toSendFileName = dataFile + '_' + suffix

    # Dump the data. Detect empty data because "rowcount" seems broken.
dataOut = open(toSendFileName, 'w') # Boolean to capture if an alternative executable is needed use_executable = False try: # Try to execute the query c.execute(query) for row in c: dataOut.write(row['timestamp'] + ' ' + row['url'] + '\n') # Close the statement and the data file c.close() except sqlite3.DatabaseError, e: # Failed, this means the version of the sqlite executable is not # correct, use an alternative. pla.logMessage('Unable to read FFox places with sqlite library') pla.logMessage('Resorting to included binary') use_executable = True
# Close the statement and the data file c.close() except sqlite3.DatabaseError, e: # Failed, this means the version of the sqlite executable is not # correct, use an alternative. pla.logMessage('Unable to read FFox places with sqlite library') pla.logMessage('Resorting to included binary') use_executable = True # See if a seccond attempt to get the data using the executable is needed if use_executable: executable = os.path.join(pla.plaDirectory, 'bin', 'sqlite3') command = [executable, _tmpFile, query] pla.logMessage('Executing ' + ' '.join(command)) pla.logMessage('Output to ' + toSendFileName) try: givenCmd = subprocess.Popen(command, executable = executable, stdout = dataOut) except OSError, e: print 'Executable not found (OSError in PLA)', e return [] except ValueError, e: print 'Incorrect arguments (PLA)', e return [] # Wait for the process to terminate and get the output givenCmd.wait() # Close the data file
def svnSendEvents(svnClient, svnPLADirectory):
    """
    Gather the data produced by every instrumented tool and pack it for
    delivery through Subversion.

    Steps:
      1) Gather information depending on the tool and place it in the
         proper location
      2) Create a TGZ with all the files
      3) Add the file to SVN
      4) Commit the new file to SVN

    NOTE(review): this chunk ends mid-function; steps 2-4 continue in a
    later part of the file.
    """
    # Accumulates the per-tool data file names; tarFileName is the unique
    # suffix/basename used for this batch.
    dataFiles = []
    tarFileName = pla.getUniqueFileName()

    ############################################################
    #
    # INSTRUMENTATION
    #
    ############################################################

    ############################################################
    # Bash instrumentation
    ############################################################
    dataFiles.extend(pla.prepareDataFile(pla.bash.dataDir, pla.bash.dataFile,
                                         "bash", tarFileName))

    ############################################################
    # Last instrumentation
    ############################################################
    dataFiles.extend(pla.last.prepareDataFile(tarFileName))

    ############################################################
    # GCC instrumentation
    ############################################################
    dataFiles.extend(pla.prepareDataFile(PLAGcc.dataDir, PLAGcc.dataFile,
                                         "gcc", tarFileName))

    ############################################################
    # GDB instrumentation
    ############################################################
    dataFiles.extend(pla.prepareDataFile(PLAGdb.dataDir, PLAGdb.dataFile,
                                         "gdb", tarFileName))

    ############################################################
    # Valgrind instrumentation
    ############################################################
    dataFiles.extend(pla.prepareDataFile(PLAValgrind.dataDir,
                                         PLAValgrind.dataFile,
                                         "valgrind", tarFileName))

    ############################################################
    # Kdevelop instrumentation
    ############################################################
    dataFiles.extend(pla.prepareDataFile(PLAKdevelop.dataDir,
                                         PLAKdevelop.dataFile,
                                         "kdevelop", tarFileName))

    ############################################################
    # Kate instrumentation
    ############################################################
dataFiles.extend(pla.prepareDataFile(PLAKate.dataDir, PLAKate.dataFile, "kate", tarFileName)) ############################################################ # iWatch instrumentation ############################################################ dataFiles.extend(pla.prepareDataFile(pla.iwatch.dataDir, pla.iwatch.dataFile, "iwatch", tarFileName)) ############################################################ # Firefox instrumentation ############################################################ dataFiles.extend(pla.firefox.prepareDataFile(tarFileName)) ############################################################ # Javac instrumentation ############################################################ dataFiles.extend(pla.prepareDataFile(PLAJavac.dataDir, PLAJavac.dataFile, "javac", tarFileName)) ############################################################ # ArgoUML instrumentation ############################################################ dataFiles.extend(pla.prepareDataFile(PLAArgouml.dataDir, PLAArgouml.dataFile, "argouml", tarFileName)) ############################################################ # WebRatio instrumentation ############################################################ dataFiles.extend(pla.prepareDataFile(PLAWebratio.dataDir, PLAWebratio.dataFile, "webratio", tarFileName)) ############################################################ pla.logMessage("placlient: Data files = " + str(dataFiles)) # If empty list of data files, terminate if dataFiles == []: pla.logMessage("placlient: empty tar file") return # TGZ all the data files tarFile = os.path.join(svnPLADirectory, tarFileName + ".tgz") pla.createTarFile(dataFiles, tarFile) ############################################################ # # REMOVE TEMPORARY FILES # ############################################################ pla.removeTemporaryData(pla.bash.dataDir, pla.bash.dataFile, "bash", tarFileName) pla.removeTemporaryData(pla.last.dataDir, pla.last.dataFile, "last", tarFileName) 
# NOTE(review): chunk begins mid-function (continuation of svnSendEvents);
# clean up the per-tool temporary files created for this batch.
pla.removeTemporaryData(PLAGcc.dataDir, PLAGcc.dataFile, "gcc",
                        tarFileName)
pla.removeTemporaryData(PLAGdb.dataDir, PLAGdb.dataFile, "gdb",
                        tarFileName)
pla.removeTemporaryData(PLAValgrind.dataDir, PLAValgrind.dataFile,
                        "valgrind", tarFileName)
pla.removeTemporaryData(PLAKdevelop.dataDir, PLAKdevelop.dataFile,
                        "kdevelop", tarFileName)
pla.removeTemporaryData(PLAKate.dataDir, PLAKate.dataFile, "kate",
                        tarFileName)
pla.removeTemporaryData(pla.iwatch.dataDir, pla.iwatch.dataFile, "iwatch",
                        tarFileName)
pla.removeTemporaryData(pla.firefox.dataDir, pla.firefox.dataFile,
                        "firefox", tarFileName)

# Bring the working copy up to date before adding the new tar file
try:
    pla.logMessage("placlient: update " + svnPLADirectory)
    svnClient.update(svnPLADirectory)
except pysvn.ClientError, e:
    # Exception when updating, not much we can do, log a message if in
    # debug, and terminate.
    pla.dumpException(e)
    # Remove tar file, will not be used
    os.remove(tarFile)
    return
# NOTE(review): chunk begins mid-function (continuation of svnSendEvents);
# last of the per-tool temporary-file removals.
pla.removeTemporaryData(pla.iwatch.dataDir, pla.iwatch.dataFile, "iwatch",
                        tarFileName)
pla.removeTemporaryData(pla.firefox.dataDir, pla.firefox.dataFile,
                        "firefox", tarFileName)

# Bring the working copy up to date before adding the new tar file
try:
    pla.logMessage("placlient: update " + svnPLADirectory)
    svnClient.update(svnPLADirectory)
except pysvn.ClientError, e:
    # Exception when updating, not much we can do, log a message if in
    # debug, and terminate.
    pla.dumpException(e)
    # Remove tar file, will not be used
    os.remove(tarFile)
    return

# Add the tar file to subversion
try:
    pla.logMessage("placlient: add " + tarFile)
    svnClient.add(tarFile)
except pysvn.ClientError, e:
    # Remove tar file, will not be used
    os.remove(tarFile)
    pla.dumpException(e)
    # NOTE(review): no return here, so the footprint below is updated even
    # when the add failed — confirm this is intentional.

# If the "add" operation has been successfull, we may assume that the data
# will eventually reach the server. The following commit could fail, but the
# files are already under subversion. As a consequence, update the
# footprint.
pla.setLastExecutionTStamp()

# TO BE DONE: Walk over the svnPLADirectory and find files that are NOT
# under version control. It might be due to previous SVN command failures.
def prepareDataFile(suffix):
    """
    Prepare a data file with the recent Google Chrome browsing history.
    This script should be executed with each svn commit.

    Copies Chrome's History SQLite database to a temporary file (avoids
    lock contention with a running browser) and builds a query over the
    visits recorded since the last execution timestamp.

    NOTE(review): this chunk ends mid-function; the query results are
    written to the suffixed file in the continuation.
    """
    global dataDir
    global dataFile
    global _tmpFile

    # Offset between Chrome's timestamp epoch and the Unix epoch, in
    # microseconds (11644473600 s = 1601-01-01 to 1970-01-01); subtracted
    # below before the 'unixepoch' conversion in the query.
    epoch_offset = 11644473600000000

    pla.logMessage('chrome: prepare ' + dataFile)

    # If no file is present in pladirectory, no instrumentation
    if not os.path.exists(dataDir):
        pla.logMessage('chrome: Disabled. Skipping')
        return []

    # Copy the Google Chrome history database to the tmp directory, in
    # order to avoid lock issues.
    gchromeDir = os.path.expanduser('~/.config/google-chrome/Default')
    sqliteFile = os.path.join(gchromeDir, 'History')
    pla.logMessage('chrome: copy file ' + sqliteFile + ' to ' + _tmpFile)
    shutil.copyfile(sqliteFile, _tmpFile)

    # Get the timestamp for the last execution
    lastExecution = pla.getLastExecutionTStamp()
    pla.logMessage('Last execution: ' + str(lastExecution))
    date_clause = ''
    if lastExecution != None:
        # Compare in Chrome's microsecond timebase, shifted to Unix epoch
        date_clause = "AND (v.visit_time - " + str(epoch_offset) + ") > " + \
            str(int(lastExecution * 1000000))

    # Get the last activity from Google Chrome, through a query to the
    # history table
    conn = sqlite3.connect(_tmpFile)
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    query = """ SELECT u.url, DATETIME(CAST ((v.visit_time - """ + \
        str(epoch_offset) + \
        """)/1000000.0 AS INTEGER), 'unixepoch', 'localtime') AS timestamp
        FROM urls u, visits v
        WHERE v.url = u.id """ + date_clause + " ORDER BY v.visit_time "
    pla.logMessage('chrome: Query = ' + query)

    # Create a duplicate of the data file with the suffix
    toSendFileName = dataFile + '_' + suffix

    # Dump the data. Detect empty data because "rowcount" seems broken.
dataOut = open(toSendFileName, 'w') # Boolean to capture if an alternative executable is needed use_executable = False try: # Try to execute the query c.execute(query) for row in c: dataOut.write(row['timestamp'] + ' ' + row['url'] + '\n') # Close the statement and the data file c.close() except sqlite3.DatabaseError, e: # Failed, this means the version of the sqlite executable is not # correct, use an alternative. pla.logMessage('Unable to read Chrome history with sqlite library') pla.logMessage('Resorting to included binary') use_executable = True
# NOTE(review): chunk begins mid-function — these except clauses belong to
# the subprocess.Popen try block of prepareDataFile above.
except OSError, e:
    print 'File not found (PLA)'
    return []
except ValueError, e:
    print 'Incorrect arguments (PLA)'
    return []

# Wait for the process to terminate and get the output
givenCmd.wait()

# Close the data file
dataOut.close()

# If the file is empty, there is nothing to send
if os.path.getsize(toSendFileName) == 0:
    pla.logMessage(logPrefix + ': No data to send')
    return []

# Return the new file
return [toSendFileName]

def main():
    """ Script to simply return the history file and reset its content """
    print prepareDataFile('bogus')

if __name__ == "__main__":
    main()