def statsBtn(self):
    """Run the stats module on the XML file named in the find-XML text box.

    Shows a critical QMessageBox instead when no file path has been
    entered (the text box is empty).
    """
    xml_path = self.findxmlPT.toPlainText()
    if xml_path != '':
        st.main(xml_path)
        return
    # No selection yet: tell the user what is missing.
    dialog = QMessageBox()
    dialog.setIcon(QMessageBox.Critical)
    dialog.setText("Error")
    dialog.setInformativeText('Please select Xml Files and Product Base File')
    dialog.setWindowTitle("Error")
    dialog.exec_()
def main(argv):
    """Load new blocks from the coin daemon into the database.

    Recognised flags in *argv*:
        -n  new-database mode: rebuild support tables, 24h timeout
        -r  recheck: re-parse the last 5 blocks (ignored in new-db mode)
        -v  verbose progress messages on stderr
        -l  long (24h) loader timeout without new-database mode

    The whole run is bounded by the ``cowtime`` timeout; on any error the
    lock file is removed, the error is logged, and the process exits.
    """
    # Lock and recheck marker files live in the current working directory.
    lockdir = str(os.getcwd() + "/" + "dataload.lock")
    recheckdir = str(os.getcwd() + "/" + "recheck")
    startmode = startcheck(lockdir, recheckdir)
    verbose = False
    # Set cowtime (loader timeout) to 5 minutes
    cowtime = 60 * 5
    try:
        for opt in argv:
            # Set new database mode and cowtime to 24 hours if -n flag
            if opt == '-n':
                startmode = 'newdb'
                cowtime = 60 * 60 * 24
            # Run recheck if -r flag (new-db mode takes precedence)
            elif opt == '-r' and startmode != 'newdb':
                startmode = 'recheck'
            # Send verbose messages to stderr if -v flag
            elif opt == '-v':
                verbose = True
            # Set cowtime to 24 hours if -l flag
            elif opt == '-l':
                cowtime = 60 * 60 * 24
    except:
        # Argument parsing is best-effort; bad argv is simply ignored.
        pass
    try:
        with timeout(cowtime, exception=Exception('DBLoader Timeout')):
            # Get block heights: daemon tip vs. highest block in the DB.
            daemon = jsonrpc("getblockcount")
            if daemon['Status'] != 'error':
                top_height = daemon['Data']
                blk_height = query_single('SELECT height FROM block ORDER BY height DESC LIMIT 1')
                if not blk_height:
                    # Empty database: start loading from height 1.
                    blk_height = 1
                else:
                    # Resume one block past the highest stored height.
                    blk_height = int(blk_height[0] + 1)
            else:
                loader_error_log(daemon['Data'], 'Get Block Height')
                raise Exception(daemon['Data'])
            # Sleep is needed to allow the daemon time to catch orphans
            if startmode != 'newdb':
                time.sleep(15)
            # Recheck mode, re-parse the last 5 blocks in the database
            if startmode == 'recheck' and blk_height > 5:
                if verbose:
                    print >> sys.stderr, "Recheck Called"
                for blk in range(blk_height - 5, blk_height):
                    orphan(blk, True)
            # Check last (blockcheck) blocks for orphans and fix if needed
            blockcheck = int(CONFIG["loader"]["blockcheck"])
            if blk_height > blockcheck:
                for blk in range(blk_height - blockcheck, blk_height):
                    d_hash = jsonrpc('getblockhash', blk)
                    db_hash = query_single('SELECT hash FROM block where height = %s', blk)[0]
                    # Hash mismatch between daemon and DB means an orphan.
                    if d_hash['Data'] != db_hash:
                        orphan(blk)
            # Genesis block TX needs to be entered manually; process this
            # block information only when building a brand-new database.
            if startmode == 'newdb':
                b_hash = jsonrpc("getblockhash", 0)['Data']
                block = jsonrpc("getblock", b_hash)['Data']
                block['raw'] = json.dumps(block, sort_keys=False, indent=1)
                add_row('block', block)
                # Set up top_address table with empty ranked rows.
                for i in range(int(CONFIG['stat']['richlistlen'])):
                    ret = query_noreturn('INSERT INTO top_address (rank) VALUES(%s)', i + 1)
                # Set up stats table
                ret = query_noreturn('INSERT INTO stats (peer_txt,m_index) VALUES("None",1)')
                blk_height = 1
            # Process blocks loop: walk from resume point to daemon tip.
            while blk_height <= top_height:
                ret = process_block(blk_height)
                if ret['Status'] == 'error':
                    raise Exception(ret['Data'])
                # One-time large_tx rebuild shortly after a fresh start.
                if startmode == 'newdb' and blk_height == 101:
                    ret = query_noreturn('TRUNCATE large_tx')
                    time.sleep(5)
                    ret = query_noreturn('INSERT INTO large_tx SELECT tx_hash,SUM(value) FROM tx_out GROUP BY tx_hash ORDER BY SUM(value) DESC LIMIT 100')
                blk_height += 1
                if verbose:
                    print >> sys.stderr, 'Processing Block: ', blk_height, ' of ', top_height, '\r',
            # Call Statistics module when enabled in the config.
            if CONFIG['loader']['stats'] == 'true':
                if verbose:
                    print >> sys.stderr, '\nCalling Statistics Module'
                stats.main()
    except Exception as e:
        # Any failure: log it, release resources and the lock, then exit.
        loader_error_log(e, 'Main loop')
        conn.close()
        os.remove(os.path.expanduser(lockdir))
        if verbose:
            print >> sys.stderr, '\nMain Loop', str(e)
        sys.exit(0)
    # Clean up: close DB and remove the recheck marker and lock files.
    conn.close()
    if verbose:
        print >> sys.stderr, "Database load complete"
    os.remove(os.path.expanduser(recheckdir))
    os.remove(os.path.expanduser(lockdir))
def main():
    """Entry point: delegate all work to the stats module."""
    stats.main()
Program acting as a controller for the front and back end systems in the docker-compose networks. """ import os.path import time import docker from stats import main if __name__ == "__main__": #Loop continuing to run until a file/option has be inputted/chosen while True: #If the start.txt file is created from inputted choices on the website if os.path.isfile("/work/start.txt") == True: #obtain docker information and run finder_program_1 client = docker.from_env() container = client.containers.get('finder_program_1') #run the first command to start the pipeline described in program with the right work directory #Essentially running docker exec -w /program finder_program_1 COMMAND container.exec_run('bash /program/commands/download.sh', workdir="/program") #If the summary statistics option is chosen then run the stats function (imported the main function from stats.py) if os.path.isfile("/work/stats.txt") == True: try: main() except: #Remove file if expected input into summary statistics parameter is incorrect if os.path.isfile("/work/stats.txt"): os.remove("/work/stats.txt") #Set a sleep function loop not going off as often causing computational stress on servers/systems. time.sleep(1)
""" gerrit-stats: Generate codereview stats based from Gerrit commits Copyright (C) 2012 Diederik van Liere, Wikimedia Foundation This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ __version__ = '0.1.0' VERSION = tuple(map(int, __version__.split('.'))) from gerrit import Gerrit from repo import Repo, Observation from changeset import Changeset, Developer, Patchset, Review from stats import main if __name__ == '__main__': main()
def main(options, args):
    """Run one complete Merge-o-Matic pass under an exclusive lock.

    Pipeline: update Sources, generate diffs and dpatches, publish
    patches, produce merges, commit them to OBS, send notifications,
    build reports/graphs/status pages, expire the pool, then remove any
    leftover unpacked directories.

    Args:
        options: parsed options object; several attributes are reset to
            None here because the per-stage ``*.main()`` calls read them.
        args: positional arguments passed through to every stage.
    """
    logger.info('starting Merge-o-Matic version %s', VERSION)
    logger.debug('options: %r', options)
    logger.debug('args: %r', args)
    ROOT = config.get('ROOT')
    lockdir = "%s/.lock" % ROOT
    codedir = os.path.abspath(os.path.dirname(__file__))
    unpackeddir = "%s/unpacked" % ROOT
    # Some modules assume we're already here
    os.chdir(ROOT)
    # Default options values referenced in various *.main() functions
    options.exclude = None
    options.include = None
    options.version = None
    options.source_distro = None
    options.source_suite = None
    try:
        # umask 002 (Python 2 octal literal): created files stay
        # group-writable but not world-writable.
        os.umask(002)
        try:
            # mkdir acts as an atomic lock: it fails if another run holds it.
            logger.debug('Locking %r', lockdir)
            os.makedirs(lockdir)
        except Exception:
            raise Exception("LOCKED (another one running?)")
        try:
            # Make addcomment.py available alongside the published merges.
            if not os.path.isdir('%s/merges' % ROOT):
                os.makedirs("%s/merges" % ROOT)
            shutil.copy2("%s/addcomment.py" % codedir,
                         "%s/merges/addcomment.py" % ROOT)
        except Exception:
            logger.exception('Unable to copy addcomment.py into %s/merges:', ROOT)
        # Update the Sources files against new packages that have been
        # downloaded
        update_sources.main(options, args)
        # Generate changes, diffs and patches
        generate_diffs.main(options, args)
        generate_dpatches.main(options, args)
        # Publish the patches
        publish_patches.main(options, args)
        # Run the merge tool
        produce_merges.main(options, args)
        # Commit committable changes to OBS
        commit_merges.main(options, args)
        notify_action_needed.main(options, args)
        # Produce pretty reports
        stats.main(options, args)
        stats_graphs.main(options, args)
        merge_status.main(options, args)
        # Expire any old packages from the pool
        expire_pool.main(options, args)
        # ?! untidy — best-effort removal of leftover unpacked trees.
        try:
            for entry in os.listdir(unpackeddir):
                p = "%s/%s" % (unpackeddir, entry)
                logger.debug('Removing unpacked directory %s', p)
                shutil.rmtree(p)
        except Exception as e:
            logger.debug('Cancelling removal of unpacked directories: %r', e)
    finally:
        # Always release the lock, even when a stage failed.
        try:
            logger.debug('Unlocking %r', lockdir)
            os.rmdir(lockdir)
        except Exception as e:
            logger.debug('Failed to unlock %r: %r', lockdir, e)
def test_stats(self):
    """stats.main with an --output target should return a truthy result."""
    result = stats.main(['--output=stats.json'])
    self.assertTrue(result)
def statsComment(ddt, reddit):
    """Generate a stats comment from *reddit* and post it as a reply to *ddt*.

    Best-effort: any failure is swallowed and reported on stdout.
    """
    try:
        ddt.reply(stats.main(reddit))
    except Exception:
        print('stats comment error')
def main(): usage = "Usage: vcfPytools.py [tool] [options]\n\n" + \ "Available tools:\n" + \ " annotate:\n\tAnnotate the vcf file with membership in other vcf files.\n" + \ " extract:\n\tExtract vcf records from a region.\n" + \ " filter:\n\tFilter the vcf file.\n" + \ " indel:\n\tIndel manipulation tools.\n" + \ " intersect:\n\tGenerate the intersection of two vcf files.\n" + \ " merge:\n\tMerge a list of vcf files.\n" + \ " multi:\n\tFind the intersections and unique fractions of multiple vcf files.\n" + \ " sort:\n\tSort a vcf file.\n" + \ " stats:\n\tGenerate statistics from a vcf file.\n" + \ " union:\n\tGenerate the union of two vcf files.\n" + \ " unique:\n\tGenerate the unique fraction from two vcf files.\n" + \ " validate:\n\tValidate the input vcf file.\n\n" + \ "vcfPytools.py [tool] --help for information on a specific tool." # Determine the requested tool. if len(sys.argv) > 1: tool = sys.argv[1] else: print >> sys.stderr, usage exit(1) if tool == "annotate": import annotate success = annotate.main() elif tool == "extract": import extract success = extract.main() elif tool == "filter": import filter success = filter.main() elif tool == "intersect": import intersect success = intersect.main() elif tool == "indel": import indel success = indel.main() elif tool == "multi": import multi success = multi.main() elif tool == "merge": import merge success = merge.main() elif tool == "sort": import sort success = sort.main() elif tool == "stats": import stats success = stats.main() elif tool == "union": import union success = union.main() elif tool == "unique": import unique success = unique.main() elif tool == "test": import test success = test.main() elif tool == "validate": import validate success = validate.main() elif tool == "--help" or tool == "-h" or tool == "?": print >> sys.stderr, usage else: print >> sys.stderr, "Unknown tool: ", tool print >> sys.stderr, "\n", usage exit(1) # If program completed properly, terminate. if success == 0: exit(0)
def __init__(self):
    """Build the Expense Manager main window.

    Creates the per-user data directory if missing, then assembles the
    GTK layout: a toolbar (add/edit buttons, month/year comboboxes), a
    main expense list, a matplotlib graph panel, and stats/viewer panes.
    """
    # Per-user data lives under ~/.expensemanager/data/
    app.data_location='/home/'+getpass.getuser()+'/.expensemanager/data/'
    dest_dir = os.path.join(app.data_location[0:-5],'data/')
    # Probe the 'years' file; on any failure assume the data dir is
    # missing and create it (EEXIST and friends are ignored).
    try:
        f=open(app.data_location+'years','r')
        f.readlines()
        f.close()
    except :
        #script_dir = os.path.dirname(os.path.abspath(__file__))
        #dest_dir = os.path.join(app.data_location)
        #print type(dest_dir), dest_dir
        #print 1
        try:
            #print 2
            os.makedirs(dest_dir)
        except OSError:
            pass
    # --- Main window ---
    self.window=gtk.Window()
    self.window.set_default_size(1220,658)
    self.window.set_position(gtk.WIN_POS_CENTER)
    self.window.connect("delete_event",self.terminate)
    self.window.set_title("Expense Manager")
    vbox = gtk.VBox(False)
    hbox = gtk.HBox()
    # Force stock images to show on buttons.
    settings=(gtk.Button()).get_settings()
    settings.set_property("gtk-button-images",True)
    button1 = gtk.Button(stock=gtk.STOCK_EDIT)
    button1.connect('clicked',self.edit)
    button2 = gtk.Button(stock=gtk.STOCK_ADD)
    button2.connect('clicked',self.gocl)
    # liststore for months
    liststore = gtk.ListStore(str)
    self.months=["JAN",'FEB','MAR','APR','MAY','JUNE','JULY','AUG','SEPT','OCT','NOV','DEC']
    for i in self.months:
        liststore.append([i])
    cell = gtk.CellRendererText()
    self.combobox = gtk.ComboBox(liststore)
    self.combobox.pack_start(cell, True)
    self.combobox.add_attribute(cell, "text", 0)
    self.combobox.connect('changed',self.changed_item)
    # Default the month combobox to the current month.
    now=datetime.datetime.now()
    self.mm=now.month-1
    self.dd=now.day
    self.yy=now.year
    #self.yy='2012'
    self.combobox.set_active(self.mm)
    a= self.combobox.get_active_text()
    #dest_dir = os.path.join(/home/+get,getpass.getuser() 'data')
    # Data file for the selected year/month, e.g. ".../2012_JAN".
    self.fname=app.data_location+str(self.yy)+'_'+a
    # Create the 'years' index file on first run.
    try:
        f=open(app.data_location+'years','r')
        f.close()
    except :
        f=open(app.data_location+'years','w')
        f.write(str(self.yy)+'\n')
        f.close()
    #hbox.add(button1)
    hbox.pack_start(button2,False)
    hbox.pack_start(button1,False)
    #hbox.add(button4)
    # select_years builds self.combobox2 (year selector).
    self.select_years()
    hbox.pack_end(self.combobox2,False)
    hbox.pack_end(self.combobox,False)
    button5=gtk.Button(stock=gtk.STOCK_ABOUT)
    button5.connect('clicked',self.about)
    #hbox.pack_end(buttmon5,False)
    vbox.pack_start(hbox, False) #hbox contains the add/stats/edit etc buttons/comboboxes
    hbox2=gtk.HBox()
    hbox2.pack_end(button5,False) #button5 is the about button
    label_user=gtk.Label(' Welcome, '+getpass.getuser().title()+'.')
    hbox2.pack_start(label_user,False)
    vbox.pack_end(hbox2,False) #hbox2 holds only the about button
    # --- Main expense list in a scrolled window ---
    sw = gtk.ScrolledWindow()
    sw.set_shadow_type(gtk.SHADOW_ETCHED_IN)
    sw.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
    store = self.create_model()
    self.treeView = gtk.TreeView(store)
    #tvc=gtk.TreeViewColumn()
    self.treeView.set_rules_hint(True)
    self.treeView.connect('cursor-changed',self.on_activated)
    sw.add(self.treeView)
    pane=gtk.HPaned()
    pane.pack1(sw)#,resize=True, shrink=True)
    #self.sw_graphs=gtk.ScrolledWindow()
    #self.sw_graphs.set_shadow_type(gtk.SHADOW_ETCHED_IN)
    #self.sw_graphs.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
    # --- matplotlib figure embedded as a GTK canvas ---
    self.f = plt.figure(dpi=75,facecolor='w')
    #self.f.patch.set_alpha(0.95)
    self.f.subplots_adjust(left = 0.08,bottom=0.1,top = 0.9,right=0.95,wspace=0.25,hspace=0.25)
    self.canvas = FigureCanvas(self.f)
    self.line1=[]
    self.line1b=[]
    self.line2=[]
    self.graphs(1)
    self.graphs(2)
    #self.sw_graphs.add_with_viewport(self.canvas)
    frame=gtk.Frame()
    frame.add(self.canvas)
    # --- Right-hand side: graphs above stats/viewer panes ---
    pane_rightPane=gtk.VPaned()
    pane_stats_viewer=gtk.HPaned()
    viewer_sw = gtk.ScrolledWindow()
    viewer_sw.set_shadow_type(gtk.SHADOW_ETCHED_IN)
    viewer_sw.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
    viewer_sw.add(viewer.treeView)
    # Viewer is seeded with today's data file, e.g. ".../2012_JAN 15".
    x=app.data_location+str(self.yy)+'_'+ str(self.months[self.mm])+' '+str(self.dd)
    #print x
    viewer.update(self,x)
    viewer.main(self)
    stats_sw = gtk.ScrolledWindow()
    stats_sw.set_shadow_type(gtk.SHADOW_ETCHED_IN)
    stats_sw.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
    stats.main(self,self.fname)
    stats_sw.add(stats.treeView)
    pane_stats_viewer.add1(stats_sw)
    pane_stats_viewer.set_position(182)
    pane_stats_viewer.add2(viewer_sw)
    pane_rightPane.add1(frame)
    pane_rightPane.set_position(390)
    pane_rightPane.add2(pane_stats_viewer)
    pane.pack2(pane_rightPane,resize=True, shrink=False)
    pane.set_position(590)
    #pane.compute_position(1120, True, False)
    #pane.queue_resize()
    vbox.pack_start(pane)
    self.create_columns(self.treeView)
    self.window.add(vbox)
    self.window.show_all()
def main(argv):
    """Load new blocks from the coin daemon into the database.

    Recognised flags in *argv*:
        -n  new-database mode: rebuild support tables, 24h timeout
        -r  recheck: re-parse the last 5 blocks (ignored in new-db mode)
        -v  verbose progress messages on stderr
        -w  skip the pre-run wait (see the sleep below)
        -l  long (24h) loader timeout without new-database mode

    The whole run is bounded by the ``cowtime`` timeout; on any error a
    traceback is printed, the lock file is removed, and the process exits.
    """
    # Lock and recheck marker files live in the current working directory.
    lockdir = str(os.getcwd() + "/" + "dataload.lock")
    recheckdir = str(os.getcwd() + "/" + "recheck")
    startmode = startcheck(lockdir, recheckdir)
    verbose = False
    doWait = True
    # Set cowtime (loader timeout) to 5 minutes
    cowtime = 60 * 5
    try:
        for opt in argv:
            # Set new database mode and cowtime to 24 hours if -n flag
            if opt == '-n':
                startmode = 'newdb'
                cowtime = 60 * 60 * 24
            # Run recheck if -r flag (new-db mode takes precedence)
            elif opt == '-r' and startmode != 'newdb':
                startmode = 'recheck'
            # Send verbose messages to stderr if -v flag
            elif opt == '-v':
                verbose = True
            # -w disables the pre-run wait before processing
            elif opt == '-w':
                doWait = False
            # Set cowtime to 24 hours if -l flag
            elif opt == '-l':
                cowtime = 60 * 60 * 24
    except:
        # Argument parsing is best-effort; bad argv is simply ignored.
        pass
    try:
        with timeout(cowtime, exception=Exception('DBLoader Timeout')):
            # Get block heights: daemon tip vs. highest block in the DB.
            daemon = jsonrpc("getblockcount")
            if daemon['Status'] != 'error':
                top_height = daemon['Data']
                blk_height = query_single(
                    'SELECT height FROM block ORDER BY height DESC LIMIT 1')
                if not blk_height:
                    # Empty database: start loading from height 1.
                    blk_height = 1
                else:
                    # Resume one block past the highest stored height.
                    blk_height = int(blk_height[0] + 1)
            else:
                loader_error_log(daemon['Data'], 'Get Block Height')
                raise Exception(daemon['Data'])
            # Sleep is needed to allow the daemon time to catch orphans
            if startmode != 'newdb' and doWait:
                time.sleep(3)
            # Recheck mode, re-parse the last 5 blocks in the database
            if startmode == 'recheck' and blk_height > 5:
                if verbose:
                    print >> sys.stderr, "Recheck Called"
                for blk in range(blk_height - 5, blk_height):
                    orphan(blk, True)
            # Check last (blockcheck) blocks for orphans and fix if needed
            blockcheck = int(CONFIG["loader"]["blockcheck"])
            if blk_height > blockcheck:
                for blk in range(blk_height - blockcheck, blk_height):
                    d_hash = jsonrpc('getblockhash', blk)
                    db_hash = query_single(
                        'SELECT hash FROM block where height = %s', blk)[0]
                    # Hash mismatch between daemon and DB means an orphan.
                    if d_hash['Data'] != db_hash:
                        orphan(blk)
            if startmode == 'newdb':
                # Set up top_address table with empty ranked rows.
                for i in range(int(CONFIG['stat']['richlistlen'])):
                    ret = query_noreturn(
                        'INSERT INTO top_address (rank) VALUES(%s)', i + 1)
                # Set up stats table
                ret = query_noreturn(
                    'INSERT INTO stats (peer_txt,cvn_txt) VALUES("none","none")'
                )
                blk_height = 0
            # Process blocks loop: walk from resume point to daemon tip.
            while blk_height <= top_height:
                ret = process_block(blk_height)
                if ret['Status'] == 'error':
                    raise Exception(ret['Data'])
                # One-time large_tx rebuild shortly after a fresh start.
                if startmode == 'newdb' and blk_height == 101:
                    ret = query_noreturn('TRUNCATE large_tx')
                    time.sleep(1)
                    ret = query_noreturn(
                        'INSERT INTO large_tx SELECT tx_hash,SUM(value) FROM tx_out GROUP BY tx_hash ORDER BY SUM(value) DESC LIMIT 100'
                    )
                blk_height += 1
                if verbose:
                    print >> sys.stderr, 'Processing Block: ', blk_height, ' of ', top_height, '\r',
            # Call Statistics module when enabled in the config.
            if CONFIG['loader']['stats'] == 'true':
                if verbose:
                    print >> sys.stderr, '\nCalling Statistics Module'
                stats.main()
    except Exception as e:
        # Any failure: dump the traceback, log it, release resources and
        # the lock, then exit.
        print "Exception in user code:"
        print '-' * 60
        traceback.print_exc(file=sys.stdout)
        print '-' * 60
        loader_error_log(str(e), 'Main loop')
        conn.close()
        os.remove(os.path.expanduser(lockdir))
        if verbose:
            print >> sys.stderr, '\nMain Loop', str(e)
        sys.exit(0)
    # Clean up: close DB and remove the recheck marker and lock files.
    conn.close()
    if verbose:
        print >> sys.stderr, "Database load complete"
    os.remove(os.path.expanduser(recheckdir))
    os.remove(os.path.expanduser(lockdir))
def main():
    """Bot entry point: run a one-shot action if a flag was given,
    otherwise process the subreddit's new-submission stream forever.

    One-shot modes (each exits): --sweep, --stats, --post_id.
    Streaming mode wraps the stream in two layers of exception handling:
    per-submission and per-stream; SIGINT terminates cleanly.
    """
    # One-shot: sweep mode.
    if args.sweep:
        sweep()
        sys.exit(0)
    # One-shot: statistics mode.
    if args.stats:
        stats.main()
        sys.exit(0)
    # One-shot: process a single submission by id.
    if args.post_id:
        log.debug("Processing single submission {}".format(args.post_id))
        process_submission(praw.models.Submission(reddit, id=args.post_id),
                           not args.comment, not args.flair)
        sys.exit(0)
    try:
        check_banned(
            not args.comment,
            not args.flair)  # repeats on CHECK_INTERVAL minutes interval
    except KeyboardInterrupt:
        log.info("Received SIGINT, terminating")
        sys.exit(0)
    # Iterate over every new submission forever
    while True:
        subreddit = reddit.subreddit(SUB)
        submission_stream = subreddit.stream.submissions()
        # two layers of exception handling, one for the processing and one
        # for the submission stream
        try:
            for submission in submission_stream:
                try:
                    # Already processed; praw returns the past 100 results
                    # for streams, previously iterated over or not
                    if DB_MAIN.submission_exists(submission.id):
                        log.debug("Submission {} is already processed".format(
                            submission.id))
                        continue
                    process_submission(submission, not args.comment,
                                       not args.flair)
                except RequestException as e:
                    log.warning(
                        "Request exception while processing submission {}: {}. Waiting 10 seconds"
                        .format(submission.id, str(e)))
                    time.sleep(10)
                except ServerError as e:
                    log.warning(
                        "Server error while processing submission {}: {}. Reddit likely under heavy load"
                        .format(submission.id, str(e)))
                except json.decoder.JSONDecodeError as e:
                    log.warning(
                        "JSONDecode exception while processing submission {}: {}."
                        .format(submission.id, str(e)))
                except Exception as e:
                    # Catch-all so one bad submission never kills the stream.
                    log.critical(
                        "some other error while processing submission {}: {}".
                        format(submission.id, str(e)))
        except KeyboardInterrupt:
            log.info("Received SIGINT, terminating")
            sys.exit(0)
        except RequestException as e:
            log.warning(
                "Request exception in submission stream: {}. Waiting 10 seconds"
                .format(str(e)))
            time.sleep(10)
        except ServerError as e:
            log.warning("Server error in submission stream: {}.".format(
                str(e)))
        except json.decoder.JSONDecodeError as e:
            log.warning(
                "JSONDecode exception in submission stream: {}.".format(
                    str(e)))
        except Exception as e:
            log.critical("some other error in submission stream: {}".format(
                str(e)))
        # sleep for two minutes, give any connection issues some time to
        # resolve itself
        time.sleep(60 * 2)
def main(): usage = "Usage: vcfPytools.py [tool] [options]\n\n" + \ "Available tools:\n" + \ " annotate:\n\tAnnotate the vcf file with membership in other vcf files.\n" + \ " extract:\n\tExtract vcf records from a region.\n" + \ " filter:\n\tFilter the vcf file.\n" + \ " indel:\n\tIndel manipulation tools.\n" + \ " intersect:\n\tGenerate the intersection of two vcf files.\n" + \ " merge:\n\tMerge a list of vcf files.\n" + \ " multi:\n\tFind the intersections and unique fractions of multiple vcf files.\n" + \ " sort:\n\tSort a vcf file.\n" + \ " stats:\n\tGenerate statistics from a vcf file.\n" + \ " union:\n\tGenerate the union of two vcf files.\n" + \ " unique:\n\tGenerate the unique fraction from two vcf files.\n" + \ " validate:\n\tValidate the input vcf file.\n\n" + \ "vcfPytools.py [tool] --help for information on a specific tool." # Determine the requested tool. if len(sys.argv) > 1: tool = sys.argv[1] else: print >> sys.stderr, usage exit(1) if tool == "annotate": import annotate success = annotate.main() elif tool == "extract": import extract success = extract.main() elif tool == "filter": import filter success = filter.main() elif tool == "intersect": import intersect success = intersect.main() elif tool == "indel": import indel success = indel.main() elif tool == "multi": import multi success = multi.main() elif tool == "merge": import merge success = merge.main() elif tool == "sort": import sort success = sort.main() elif tool == "stats": import stats success = stats.main() elif tool == "union": import union success = union.main() elif tool == "unique": import unique success = unique.main() elif tool == "test": import test success = test.main() elif tool == "validate": import validate success = validate.main() elif tool == "--help" or tool == "-h" or tool == "?": print >> sys.stderr, usage else: print >> sys.stderr, "Unknown tool: ",tool print >> sys.stderr, "\n", usage exit(1) # If program completed properly, terminate. if success == 0: exit(0)
def main(wg=None, window=None):
    """Show the main menu window with Play/Stats/Quit buttons and a
    settings gear.

    When *wg* is None a fresh 600x492 window is created; otherwise the
    passed *window* is destroyed and a new one opens at geometry *wg*
    (with the app icon set).
    """
    if wg is None:
        window = Tk.Tk()
        window.title(info.windowName)
        window.geometry("600x492")
        window.configure(background="#2b2b2b")
        window.resizable(False, False)
    else:
        # Replace the caller's window with a fresh one at the same geometry.
        window.destroy()
        window = Tk.Tk()
        window.title(info.windowName)
        window.geometry(wg)
        window.iconbitmap(default="data/img.ico")
        window.configure(background="#2b2b2b")
        window.resizable(False, False)

    # Grid sizing: banner row plus three equal-ish button columns.
    window.grid_rowconfigure(1, minsize=350)
    window.grid_columnconfigure(0, minsize=200)
    window.grid_columnconfigure(1, minsize=200)
    window.grid_columnconfigure(2, minsize=150)

    # Banner image across the top of the grid.
    banner = Image.open("data/img.png").resize((600, 400), Image.ANTIALIAS)
    banner = ImageTk.PhotoImage(banner)
    Tk.Label(window, image=banner, background="#2b2b2b").grid(
        row=0, column=0, columnspan=4, rowspan=2, sticky="NSEW")

    # Menu buttons: Play, Stats, Quit.
    Tk.Button(window, text="Play", font=("Odin Rounded", 36),
              background="#3c3f41", borderwidth=1, relief=Tk.SUNKEN,
              command=lambda: start(window.winfo_geometry(), window),
              highlightcolor="#515151").grid(row=2, column=0, sticky="NSEW")
    Tk.Button(window, text="Stats", font=("Odin Rounded", 36),
              background="#3c3f41", borderwidth=1, relief=Tk.SUNKEN,
              command=lambda: stats.main(window.winfo_geometry(), window),
              highlightcolor="#515151").grid(row=2, column=1, sticky="NSEW")
    Tk.Button(window, text="Quit", font=("Odin Rounded", 36),
              background="#3c3f41", borderwidth=1, relief=Tk.SUNKEN,
              highlightcolor="#515151",
              command=window.destroy).grid(row=2, column=2, sticky="NSEW",
                                           columnspan=2)

    # Settings "gear" drawn on a small canvas in the top-right corner.
    gear = Tk.Canvas(window, background="#3c3f41", highlightthickness=1,
                     highlightcolor="#515151", height=50, relief=Tk.SUNKEN,
                     width=50)
    gear.create_text(26, 26, fill="#000000", text=u"\u2699",
                     font=("Odin Rounded", 25))
    gear.grid(row=0, column=3, sticky="NSEW")
    gear.bind("<Button-1>", lambda x: oppClicked(gear))
    gear.bind("<ButtonRelease-1>", lambda x: oppReleased(gear, window))

    window.mainloop()