# NOTE(review): this chunk mixes a worker definition with driver code; the
# names q, oq, ips, num_workers, f, Snmp and HostRecord are defined elsewhere
# in the file.
def snmp_query(i, out):
    # Worker: pull IP addresses from the 'out' queue and SNMP-query each one.
    # i: worker number (used only for logging). out: multiprocessing queue.
    while True:
        time.sleep(.1)
        if out.empty():
            sys.exit()
        print "Process Number: %s" % i
        ipaddr = out.get()
        s = Snmp()
        h = HostRecord()
        h.ip = ipaddr
        h.snmp_response = s.query()
        print h
        # NOTE(review): this 'return' exits after the first item, so the
        # 'while True' loop only ever runs a single iteration.
        return h

try:
    # putmany() exists in the old 'processing' library, not stdlib multiprocessing
    q.putmany(ips)
finally:
    for i in range(num_workers):
        p = Process(target=f, args=[i, q, oq])
        p.start()
    for i in range(num_workers):
        pp = Process(target=snmp_query, args=[i, oq])
        pp.start()
    print "main process joins on queue"
    # NOTE(review): joins only the LAST 'f' worker, not all started processes
    p.join()
    while not oq.empty():
        print "Validated", oq.get()
    print "Main program finished"
def load():
    """Spawn one TRANS process per host line.

    Reads the host list from sys.argv[2] and transfers sys.argv[3] to
    sys.argv[4] on each host.  Each worker is started and joined before
    the next one starts, so transfers run strictly sequentially (same
    behavior as before).
    """
    source = sys.argv[3]
    destdir = sys.argv[4]
    # was: open(...).readlines() with no close -- leaked the file handle
    with open(sys.argv[2], 'r') as hostfile:
        for line in hostfile:
            host = line.split()
            # was: each host made a pointless put()/get() round trip through
            # a Queue, and the Process object reused the 'eachline' name that
            # also held the parsed host line.
            worker = Process(target=TRANS, args=(host, source, destdir))
            worker.start()
            worker.join()
def exe():
    """Run every command from sys.argv[3] on every host from sys.argv[2].

    For each (command, host) pair an SSH worker process is started and
    joined immediately, so execution is strictly sequential (same
    behavior as before).
    """
    # was: open(...).readlines() with no close on either file -- leaked handles
    with open(sys.argv[2], 'r') as hf:
        hosts = [line.split() for line in hf]
    with open(sys.argv[3], 'r') as cf:
        for command_line in cf:
            # was: ''.join(command_line.split('\n')) -- for a single file
            # line this only strips the trailing newline
            command = command_line.rstrip('\n')
            for host in hosts:
                # was: a pointless Queue put()/get() round trip, with the
                # Process object clobbering the 'eachline' loop name
                worker = Process(target=SSH, args=(host, str(command)))
                worker.start()
                worker.join()
def cmdrun(self, cmd):
    """Run 'cmd' in a child process via self.newProcExecuteCmd and return
    the result posted on the queue, or -1 if nothing arrives in time."""
    comScanCmd = cmd
    queue = Queue()
    scanProc = Process(
        target=self.newProcExecuteCmd,
        args=[queue, comScanCmd])
    scanProc.start()
    # Wait up to 10 seconds for the child to finish.
    # (The original comment said "wait 5 seconds", which did not match join(10).)
    scanProc.join(10)
    try:
        scanResult = queue.get(timeout=5)
    except Exception as e:
        # Child produced no result within the timeout (or died early).
        print "get cmd result error"
        scanResult = -1
    # Kill the child unconditionally; terminate() on a finished process is a no-op.
    scanProc.terminate()
    return scanResult
def cmdrun(self, cmd):
    """Variant of cmdrun: give the child 5 s to finish, then wait up to
    30 s for a result on the queue; return -1 on a queue timeout."""
    # NOTE(review): this outer 'try:' has no matching except/finally in the
    # visible chunk -- the handler presumably follows below.
    try:
        comScanCmd = cmd
        queue = Queue()
        scanProc = Process(target=self.newProcExecuteCmd,
                           args=[queue, comScanCmd])
        scanProc.start()
        scanProc.join(5)
        try:
            scanResult = queue.get(timeout=30)
            #print scanResult
        except Exception, e:
            print e
            print "get cmd result error: %s " % str(e)
            scanResult = -1
        # terminate() on a finished process is a no-op
        scanProc.terminate()
        return scanResult
def test_possible_to_share_sqlalchemy_objects_across_process_boundaries_with_memcache(self):
    """Store an object through a ChainedStore (memcache + sqlite) in a child
    process and verify the main process can read it back with its SQLAlchemy
    merge callbacks intact."""
    # write to the memcache in another process
    def write_to_cache():
        # create storage for subprocess
        mc = MemcacheStore(servers=[_MEMCACHED_HOST])
        sa = SqlAlchemyStore(uri="sqlite:///%s" % _SQLITE_FILENAME)
        ch = ChainedStore([mc, sa])
        # store a MultiprocBazbot object for retrieval in the main process
        bb = MultiprocBazbot(1, 2, 999)
        ch.put(bb)
    p = Process(target=write_to_cache)
    p.start()
    p.join()
    # create storage in the main process
    mc = MemcacheStore(servers=[_MEMCACHED_HOST])
    sa = SqlAlchemyStore(uri="sqlite:///%s" % _SQLITE_FILENAME)
    ch = ChainedStore([mc, sa])
    # FIXME: these unit tests should be valid, but for some reason running the Shove unit tests beforehand interacts with this
    # # accessing the MemcacheStore directly will not work
    # # since the ChainedStore is the only thing that can
    # # conceptually link it to the SqlAlchemyStore
    # bb_mc = mc.get(MultiprocBazbot, {"zap": 999})
    # self.assert_(isinstance(bb_mc, MultiprocBazbot))
    # self.assertRaises(Exception, lambda: bb_mc.zap)
    # self.assertRaises(Exception, lambda: bb_mc.foo)
    # self.assertRaises(Exception, lambda: bb_mc.bar)
    # self.assertRaises(Exception, lambda: bb_mc.bleh)
    # accessing the ChainedStore will work since it allows
    # the SqlAlchemyStore to execute its merge callbacks
    bb = ch.get(MultiprocBazbot, {"zap": 999})
    self.assert_(bb)
    self.assert_(isinstance(bb, MultiprocBazbot))
    self.assertEqual(bb.zap, 999)
    self.assertEqual(bb.foo, 1)
    self.assertEqual(bb.bar, 2)
    self.assertEqual(bb.bleh, "bleh")
def run_code():
    """Flask view: execute user-submitted Python from ?code= in a child
    process with a 2-second budget; return a JSON traceback, success
    message, or timeout message."""
    if not request.args:
        abort(400)
    pycode = request.args.get('code', '')
    # SECURITY NOTE(review): substring blacklisting of "__class__"/"_module"
    # is trivially bypassable, and exec() of untrusted input is inherently
    # unsafe -- this needs real sandboxing, not string filtering.
    if ("__class__" in pycode) or ("_module" in pycode):
        return jsonify("timed out! you have an infinite loop!")
    pysplit = pycode.splitlines()
    # print(pycode, file=sys.stderr)
    p = Process(target=exec, args=(pycode, myglobals))
    p.start()
    p.join(2)       # give the user code at most 2 seconds
    p.terminate()   # kill it if still running (no-op if already finished)
    # p.exception is presumably set by a custom Process subclass that
    # captures child exceptions -- TODO confirm; 1 appears to mean "no error",
    # otherwise it is an (exc_type_instance, traceback) pair.
    if p.exception:
        if p.exception == 1:
            return jsonify("no error!")
        tb = p.exception[1]
        if isinstance(p.exception[0], SyntaxError):
            return getSynTraceback(filename, pysplit, tb)
        return getTraceback(filename, pysplit, tb)
    return jsonify("timed out! you have an infinite loop!")
ips = IP("10.0.1.0/24") def f(i, q): while True: if q.empty(): sys.exit() print "Process Number: %s" % i ip = q.get() ret = subprocess.call("ping -c 1 %s" % ip, shell=True, stdout=open('/dev/null', 'w'), stderr=subprocess.STDOUT) if ret == 0: print "%s: is alive" % ip else: print "Process Number: %s didn't find a response for %s" % (i, ip) for ip in ips: q.put(ip) #q.put("192.168.1.1") for i in range(50): p = Process(target=f, args=[i, q]) p.start() print "main process joins on queue" p.join() print "Main Program finished"
pass print "ok" def snmp_query(i, out): while True: time.sleep(.1) if out.empty: sys.exit() print "Process Number: %s" % i ipaddr = out.get() s = Snmp() h = HostRecord() h.snmp_response = s.query() print h return h try: q.put(ips) finally: for i in range(num_workers): pp = Process(target=f, args=[i, q, oq]) pp.start() print "main process joins on queue" P.join() print "Main Program finshed"
#/usr/bin/env python # -*- coding: utf-8 -*- from processing import Process, Queue import time def f(q): x = q.get() print "Process number %s, sleeps for %s seconds" %(x, x) time.sleep(x) print "Process number %s finished" % x q = Queue() for i in range(10): q.put(i) i = Process(target=f, args=[q] i.start() print "main process joins on queue" i.join() print "Main Program finished"
# NOTE(review): fragment of a larger script -- start, start_index, end_index,
# bids_number*, conn_db, cur_db, dstdb_info, fp, platform_id_set, FULLBIDPERCENT
# and the helper functions are all defined above this chunk.
end = 0
# Partition the bid range into PLATFORMDIVISION contiguous [start, end)
# slices, spreading the remainder one extra bid at a time over the first slices.
for i in range(PLATFORMDIVISION):
    start_index.append(start)
    end = start + bids_number_pertime
    if bids_number_reminder > 0:
        end += 1
        bids_number_reminder -= 1
    end_index.append(end)
    start = end
cur_db_list = getCursors(conn_db, n=PLATFORMDIVISION)
print cur_db_list
# repeat-whitelist (translated from: 重复白名单)
platform_id_white_list = getPlatformIdList("repeatwhiteplatform_id.txt")
for i in range(PLATFORMDIVISION):
    # NOTE(review): start()+join() inside the loop runs the slices serially,
    # one worker process at a time.
    j=Process(target=readSQLReturn,args=[[start_index[i], end_index[i], cur_db_list[i]]])
    j.start()
    j.join()
print "共有" + str(bids_number) + "个标."
stringSQL = "SELECT count(*) FROM " + dstdb_info
cur_db.execute(stringSQL)
good_number = cur_db.fetchone()[0]
print "在满额限制为"+ str(FULLBIDPERCENT) + "的情况下, 只有" + "%.2f" % (100 * (float(good_number)/bids_number)) + "%的数据是可用的."
#changeValue("./clean_date.xml","clean_date_lasttime",clean_date_thistime)
fp.close()
# dump all platform ids to a file (translated from: 将所有的platform_id输出到文件)
fp = open("platform_id_list_info.txt","w")
for platform_id in platform_id_set:
    fp.write(str(platform_id))
    fp.write("\n")
fp.close()
closeCursors(cur_db)
def rpc_processTest(self, **kwargs):
    """Initialise the 'build_pdf' progress thermometer and launch the
    'testOtherProcess' page job in a background process named 'pippo'."""
    self.app.setThermo('build_pdf', 0, 'Preparo elaborazione', 10, command='init')
    target_page = self.pageLocalDocument('testOtherProcess')
    worker = Process(name='pippo', target=self.testOtherProcess,
                     args=(target_page,))
    worker.start()
#datetime(2004, 8, 9, 0, 0), #datetime(2004, 8, 10, 0, 0), #datetime(2004, 8, 11, 0, 0), #datetime(2004, 8, 12, 0, 0), #datetime(2004, 8, 13, 0, 0), ] # create Queues queue = BufferedQueue() result = Queue() p1 = Process(target=process_func, args=[queue, result]) p2 = Process(target=process_func, args=[queue, result]) p1.setStoppable(True); p2.setStoppable(True) p1.start(); p2.start() #strategy_args = random_strategies_generator_2(periods=(1,2,3,4,5), # strats_per_period=24) strategy_args = [ (('long_tops', 4, 'low', 'HH', 6, 7), ('short_tops', 4, 'high', 'LL', 6, 7)) ] for (s1, s2) in strategy_args: print s1, s2 # create tasks for this batch tasks = {} task_id = 0 for i in range(len(strategy_args)): args = strategy_args[i]
def main(): """Start the Feedback Controller.""" # Get Options description = """Feedback Controller""" usage = "%prog [Options]" version = """ Copyright (C) 2007-2010 Bastian Venthur <venthur at cs tu-berlin de> Homepage: http://bbci.de/pyff This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ parser = OptionParser(usage=usage, version=version, description=description) parser.add_option('-l', '--loglevel', type='choice', choices=['critical', 'error', 'warning', 'info', 'debug', 'notset'], dest='loglevel', help='Which loglevel to use for everything but the Feedbacks. Valid loglevels are: critical, error, warning, info, debug and notset. [default: warning]', metavar='LEVEL') parser.add_option('--fb-loglevel', type='choice', choices=['critical', 'error', 'warning', 'info', 'debug', 'notset'], dest='fbloglevel', help='Which loglevel to use for the Feedbacks. Valid loglevels are: critical, error, warning, info, debug and notset. [default: warning]', metavar='LEVEL') parser.add_option('-p', '--plugin', dest='plugin', help="Optional Plugin, the Feedback Controller should inject into the Feedback.", metavar="MODULE") parser.add_option('-a', '--additional-feedback-path', dest='fbpath', help="Additional path to search for Feedbacks.", metavar="DIR") parser.add_option('--port', dest='port', help="Set the Parallel port address to use. Windows only. 
Should be in Hex (eg: 0x378)", metavar="PORTNUM") parser.add_option("--nogui", action="store_true", default=False, help="Start without GUI.") options, args = parser.parse_args() # Initialize logging str2loglevel = {'critical' : logging.CRITICAL, 'error' : logging.ERROR, 'warning' : logging.WARNING, 'info' : logging.INFO, 'debug' : logging.DEBUG, 'notset' : logging.NOTSET} loglevel = str2loglevel.get(options.loglevel, logging.WARNING) fbloglevel = str2loglevel.get(options.fbloglevel, logging.WARNING) logging.basicConfig(level=loglevel, format='[%(process)-5d:%(threadName)-10s] %(name)-25s: %(levelname)-8s %(message)s') logging.info('Logger initialized with level %s.' % options.loglevel) logging.getLogger("FB").setLevel(fbloglevel) # get the rest plugin = options.plugin fbpath = options.fbpath guiproc = None if not options.nogui: guiproc = Process(target=GUI.main) guiproc.start() port = None if options.port != None: port = int(options.port, 16) try: fc = FeedbackController(plugin, fbpath, port) fc.start() except (KeyboardInterrupt, SystemExit): logging.debug("Caught keyboard interrupt or system exit; quitting") except: logging.error("Caught an exception, quitting FeedbackController.") print traceback.format_exc() finally: print "Stopping FeedbackController...", fc.stop() if guiproc: guiproc.terminate() print "Done."
# NOTE(review): fragment of a larger script (a reformatted duplicate of the
# slicing chunk above) -- start, start_index, end_index, bids_number*,
# conn_db, cur_db, dstdb_info, fp, platform_id_set and the helpers are
# defined above this chunk.
# Partition the bid range into PLATFORMDIVISION contiguous [start, end)
# slices, spreading the remainder one extra bid at a time over the first slices.
for i in range(PLATFORMDIVISION):
    start_index.append(start)
    end = start + bids_number_pertime
    if bids_number_reminder > 0:
        end += 1
        bids_number_reminder -= 1
    end_index.append(end)
    start = end
cur_db_list = getCursors(conn_db, n=PLATFORMDIVISION)
print cur_db_list
# repeat-whitelist (translated from: 重复白名单)
platform_id_white_list = getPlatformIdList("repeatwhiteplatform_id.txt")
for i in range(PLATFORMDIVISION):
    # NOTE(review): start()+join() inside the loop runs the slices serially
    j = Process(target=readSQLReturn,
                args=[[start_index[i], end_index[i], cur_db_list[i]]])
    j.start()
    j.join()
print "共有" + str(bids_number) + "个标."
stringSQL = "SELECT count(*) FROM " + dstdb_info
cur_db.execute(stringSQL)
good_number = cur_db.fetchone()[0]
print "在满额限制为" + str(FULLBIDPERCENT) + "的情况下, 只有" + "%.2f" % (
    100 * (float(good_number) / bids_number)) + "%的数据是可用的."
#changeValue("./clean_date.xml","clean_date_lasttime",clean_date_thistime)
fp.close()
# dump all platform ids to a file (translated from: 将所有的platform_id输出到文件)
fp = open("platform_id_list_info.txt", "w")
for platform_id in platform_id_set:
    fp.write(str(platform_id))
    fp.write("\n")
fp.close()
# NOTE(review): the two statements below are the orphan tail of a worker
# function defined above this chunk.
print "Process Number: %s didn't find a response for %s " % (i, ip)
pass

def snmp_query(i,out):
    # Worker: SNMP-query IP addresses pulled from the 'out' queue until it
    # is empty.  Snmp and HostRecord are defined elsewhere in the file.
    while True:
        time.sleep(.1)
        if out.empty():
            sys.exit()
        print "Process Number: %s" % i
        ipaddr = out.get()
        s = Snmp()
        h = HostRecord()
        h.ip = ipaddr
        h.snmp_response = s.query()
        print h
        # NOTE(review): returns after the first record; the loop never repeats.
        return h

try:
    # putmany() exists in the old 'processing' library, not stdlib multiprocessing
    q.putmany(ips)
finally:
    for i in range(num_workers):
        p = Process(target=f, args=[i,q,oq])
        p.start()
    for i in range(num_workers):
        pp = Process(target=snmp_query, args=[i,oq])
        pp.start()
    print "main process joins on queue"
    # NOTE(review): joins only the LAST 'f' worker, not all started processes
    p.join()
    print "Main Program finished"
# NOTE(review): the names on the next line are the continuation of a
# 'from csc.conceptnet4.models import ...' statement that starts above
# this chunk.
    Frame, Concept, Frequency, Relation, Batch
from csc.corpus.models import Language, Sentence
from csc.nl.models import FunctionClass, FunctionWord
from events.models import Event, Activity
from voting.models import Vote
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType

if __name__ == '__main__':
    serializer = serializers.get_serializer("myyaml")()
    # Second copy of the model imports, scoped to the __main__ block.
    from csc.conceptnet4.models import RawAssertion, Assertion, SurfaceForm,\
        Frame, Concept, Frequency, Relation, Batch
    from csc.corpus.models import Language, Sentence
    from csc.nl.models import FunctionClass, FunctionWord
    from events.models import Event, Activity
    from voting.models import Vote
    from django.contrib.auth.models import User
    from django.contrib.contenttypes.models import ContentType
    classes = [Vote, RawAssertion, Frame, SurfaceForm, Assertion,
               Relation, Frequency, Concept, FunctionClass, FunctionWord,
               Language, Sentence, User, ContentType, Activity, Batch]
    # Split the 16 model classes into four uneven slices (5/5/5/1) and hand
    # each slice to its own worker process.
    classes1 = classes[0:5]
    classes2 = classes[5:10]
    classes3 = classes[10:15]
    classes4 = classes[15:]
    for working_set in (classes1, classes2, classes3, classes4):
        proc = Process(target=worker, args=[working_set])
        proc.start()
def algo(request, algo):
    """Django view for one steganography algorithm page.

    GET renders empty embed/extract forms; POST runs the selected embed
    ("submit1") or extract ("submit2") job in a forked process, handing the
    POST data and uploaded file path over a Queue and waiting up to 10 s
    for the result.
    """
    text = ""
    type = ""  # NOTE(review): shadows the 'type' builtin inside this view
    algo_object = get_object_or_404(Algo, shortTitle=algo)
    manual = get_object_or_404(ManPage, algo=algo_object)
    # per-algorithm lookup tables: form classes and output image type
    embedFormDict = {
        'cpt' : CPTEmbedForm,
        'f5' : F5EmbedForm,
        'lsb' : LsbEmbedForm,
        'gifshuffle' : GifShuffleEmbedForm,
        'bs' : BattlestegEmbedForm,
    }
    extractFormDict = {
        'cpt' : CPTExtractForm,
        'f5' : F5ExtractForm,
        'lsb' : LsbExtractForm,
        'gifshuffle' : GifShuffleExtractForm,
        'bs' : BattlestegExtractForm,
    }
    typeDict = {
        'cpt' : "png",
        'f5' : "jpeg",
        'lsb' : "png",
        'gifshuffle' : "gif",
        'bs' : "png",
    }
    if request.method == 'POST':
        q = Queue()
        # embedding
        if "submit1" in request.POST:
            algoDict = {
                'cpt' : cptEmbed,
                'f5' : f5Embed,
                'lsb' : lsbEmbed,
                'gifshuffle' : gifShuffleEmbed,
                'bs' : bsEmbed,
            }
            embedForm = embedFormDict[algo](request.POST, request.FILES)
            extractForm = extractFormDict[algo]()
            type = typeDict[algo]
            p = Process(target=algoDict[algo], args=(q, )) # fork process to embed
            if embedForm.is_valid():
                p.start()
                # hand the worker its input: POST data + uploaded file path
                q.put([request.POST, request.FILES['file'].temporary_file_path()])
                # NOTE(review): shells out just to sleep one second;
                # time.sleep(1) would do the same without a subprocess
                os.system("sleep 1")
                try:
                    # wait up to 10 s for the worker's result
                    retval = q.get(True, 10)
                except Q.Empty:  # 'Q' is presumably the Queue module alias -- TODO confirm
                    retval = -2
                p.join()
                if retval == -1:
                    text += "%s-Datei nicht gefunden oder fehlerhaft."%(type)
                elif retval == -2:
                    text += "Fehler beim Einbetten. Anderes Bild oder andere Parameter versuchen."
                else:
                    return createResponse(retval, type)
        # extracting
        elif "submit2" in request.POST:
            algoDict = {
                'cpt' : cptExtract,
                'f5' : f5Extract,
                'lsb' : lsbExtract,
                'gifshuffle' : gifShuffleExtract,
                'bs' : bsExtract,
            }
            embedForm = embedFormDict[algo]()
            extractForm = extractFormDict[algo](request.POST, request.FILES)
            type = typeDict[algo]
            p = Process(target=algoDict[algo], args=(q, )) # fork process to extract
            if extractForm.is_valid():
                p.start()
                q.put([request.POST, request.FILES['file'].temporary_file_path()])
                try:
                    retval = q.get(True, 10)
                except Q.Empty:
                    retval = -2
                p.join()
                if retval == -1:
                    text += "%s-Datei nicht gefunden oder fehlerhaft."%(type)
                elif retval == -2:
                    text += "Fehler beim Ausbetten. Anderes Bild oder andere Parameter versuchen."
                else:
                    #print retval
                    text += retval
    # empty form
    else:
        embedForm = embedFormDict[algo]()
        extractForm = extractFormDict[algo]()
    # render
    return render_to_response("stego_algo.html", {'algo' : algo_object,
                                                  'embedForm' : embedForm,
                                                  'extractForm' : extractForm,
                                                  'text' : text,
                                                  'algo_type' : 'Staganographie',
                                                  'manual' : manual,})