Example #1
def snmp_query(i, out):
    while True:
        time.sleep(.1)
        if out.empty():
            sys.exit()
            print "Process Number: %s" % i
        ipaddr = out.get()
        s = Snmp()
        h = HostRecord()
        h.ip = ipaddr
        h.snmp_response = s.query()
        print h
        return h

try:
    q.putmany(ips)

finally:
    for i in range(num_workers):
        p = Process(target=f, args=[i, q, oq])
        p.start()
    for i in range(num_workers):
        pp = Process(target=snmp_query, args=[i, oq])
        pp.start()

print "main process joins on queue"

p.join()
while not oq.empty():
    print "Validated", oq.get()

print "Main program finished"
Example #2
    def __init__(self, src, dst, gz=False, level=6):
        Process.__init__(self)
        self.src = src
        self.dst = dst
        self.gz = gz
        self.level = level

        info("Cloning '%s' to '%s' (gz = %d)" % (src, dst, gz))
Example #3
    def __init__(self, src, dst, gz=False, level=6):
        Process.__init__(self)
        self.src = src
        self.dst = dst
        self.gz = gz
        self.level = level

        info("Cloning '%s' to '%s' (gz = %d)" % (src, dst, gz))
Example #4
 def __init__(self, modname, classname, ipcReady, port, fbplugin):
     Process.__init__(self)
     self.classname = classname
     self.modname = modname
     self.ipcReady = ipcReady
     self.port = port
     self.fbplugin = fbplugin
     self.loglevel = logging.getLogger().level
     self.fbloglevel = logging.getLogger("FB").level
     self.logformat = logging.getLogger().handlers[0].formatter._fmt
Example #5
def load():
	queue=Queue()
	hostfile_line=open(sys.argv[2],'r').readlines()
	source=sys.argv[3]
	destdir=sys.argv[4]
	for hostfile in hostfile_line:
		eachline=hostfile.split()
		queue.put(eachline)
		proc=Process(target=TRANS,args=(queue.get(),source,destdir))
		proc.start()
	proc.join()
Example #6
def exe():
	queue=Queue()
	hostfile_line=open(sys.argv[2],'r').readlines()
	command_file=open(sys.argv[3],'r').readlines()
	for command_line in command_file:
		command_list=command_line.split('\n')
		command=''.join(command_list)
		for hostfile in hostfile_line:
			eachline=hostfile.split()
			queue.put(eachline)
			proc=Process(target=SSH,args=(queue.get(),str(command)))
			proc.start()
	proc.join()
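Both load() and exe() above hand work to target functions that are not shown. A hypothetical sketch of the SSH target (the name and argument order are taken from exe(); the body is an assumption):

import os

def SSH(host_fields, command):
	# host_fields is one whitespace-split line from the host file;
	# assume the first field is the hostname to reach
	host = host_fields[0]
	os.system("ssh %s '%s'" % (host, command))
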
Example #7
def test():
    NUMBER_OF_PROCESSES = 4
    TASKS1 = [(mul, (i, 7)) for i in range(20)]
    TASKS2 = [(plus, (i, 8)) for i in range(10)]

    # Create queues
    task_queue = Queue()
    done_queue = Queue()

    # Submit tasks
    task_queue.putMany(TASKS1)

    # Start worker processes
    for i in range(NUMBER_OF_PROCESSES):
        Process(target=worker, args=(task_queue, done_queue)).start()

    # Get and print results
    print 'Unordered results:'
    for i in range(len(TASKS1)):
        print '\t', done_queue.get()

    # Add more tasks using `put()` instead of `putMany()`
    for task in TASKS2:
        task_queue.put(task)

    # Get and print some more results
    for i in range(len(TASKS2)):
        print '\t', done_queue.get()

    # Tell child processes to stop
    for i in range(NUMBER_OF_PROCESSES):
        task_queue.put('STOP')
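
test() assumes a worker function plus the mul and plus task callables, none of which appear above. A minimal sketch of what they might look like (the bodies are guesses, not the project's code):

def mul(a, b):
    return a * b

def plus(a, b):
    return a + b

def worker(inp, out):
    # consume (func, args) tasks until the 'STOP' sentinel arrives
    for func, args in iter(inp.get, 'STOP'):
        out.put(func(*args))
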
 def test_possible_to_share_sqlalchemy_objects_across_process_boundaries_with_memcache(self):
     
     # write to the memcache in another process
     def write_to_cache():
         # create storage for subprocess
         mc = MemcacheStore(servers=[_MEMCACHED_HOST])
         sa = SqlAlchemyStore(uri="sqlite:///%s" % _SQLITE_FILENAME)
         ch = ChainedStore([mc, sa])
     
         # store a MultiprocBazbot object for retrieval in the main process
         bb = MultiprocBazbot(1, 2, 999)
         ch.put(bb)
 
     p = Process(target=write_to_cache)
     p.start()
     p.join()
 
     # create storage in the main process
     mc = MemcacheStore(servers=[_MEMCACHED_HOST])
     sa = SqlAlchemyStore(uri="sqlite:///%s" % _SQLITE_FILENAME)
     ch = ChainedStore([mc, sa])
 
     # FIXME: these unit tests should be valid, but for some reason running the Shove unit tests beforehand interferes with this one
     # # accessing the MemcacheStore directly will not work
     # # since the ChainedStore is the only thing that can
     # # conceptually link it to the SqlAlchemyStore
     # bb_mc = mc.get(MultiprocBazbot, {"zap": 999})
     # self.assert_(isinstance(bb_mc, MultiprocBazbot))
     # self.assertRaises(Exception, lambda: bb_mc.zap)
     # self.assertRaises(Exception, lambda: bb_mc.foo)
     # self.assertRaises(Exception, lambda: bb_mc.bar)
     # self.assertRaises(Exception, lambda: bb_mc.bleh)
 
     # accessing the ChainedStore will work since it allows
     # the SqlAlchemyStore to execute its merge callbacks
     bb = ch.get(MultiprocBazbot, {"zap": 999})
     self.assert_(bb)
     self.assert_(isinstance(bb, MultiprocBazbot))
     self.assertEqual(bb.zap, 999)
     self.assertEqual(bb.foo, 1)
     self.assertEqual(bb.bar, 2)
     self.assertEqual(bb.bleh, "bleh")
     
def runpool(address, number_of_processes):
    # create a single server object -- children will each inherit a copy
    server = HTTPServer(address, RequestHandler)

    # create child processes to act as workers
    for i in range(number_of_processes - 1):
        Process(target=serve_forever, args=(server, )).start()

    # main process also acts as a worker
    serve_forever(server)
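
runpool() hands every worker the same serve_forever callable, which is not shown. A plausible sketch (an assumption; the original may add logging or error handling):

def serve_forever(server):
    # each worker blocks here, accepting connections on the inherited listening socket
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        pass
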
Example #10
def multiProcessTest(n, funcs):
    """Fork N processes and run a testing function in each."""
    if type(funcs) != list:
        funcs = [funcs] * n
    procs = []
    for f, args in funcs:
        procs.append(Process(target=f, args=args))
    for p in procs:
        p.start()
    for p in procs:
        p.join()
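
A hypothetical call to multiProcessTest(): funcs is a list of (callable, args) pairs, one per child process; ping_loop is invented for illustration:

def ping_loop(n):
    for i in range(n):
        print "iteration", i

multiProcessTest(2, [(ping_loop, (3,)), (ping_loop, (5,))])
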
Example #11
	def cmdrun(self, cmd):
		comScanCmd = cmd
		queue = Queue()
		scanProc = Process(
			target=self.newProcExecuteCmd, args=[queue, comScanCmd])
		scanProc.start()
		# wait up to 10 seconds for the scan process to finish
		scanProc.join(10)
		try:
			scanResult = queue.get(timeout=5)
		except Exception as e:
			print "get cmd result error"
			scanResult = -1
		scanProc.terminate()
		return scanResult
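
cmdrun() targets a newProcExecuteCmd method that is not included above. One possible shape for it (a guess; commands.getstatusoutput is just one way to run the command):

	def newProcExecuteCmd(self, queue, cmd):
		# runs in the child process: execute the command and push its output onto the queue
		import commands
		status, output = commands.getstatusoutput(cmd)
		queue.put(output)
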
Example #12
 def cmdrun(self, cmd):
     try:
         comScanCmd = cmd
         queue = Queue()
         scanProc = Process(target=self.newProcExecuteCmd,
                            args=[queue, comScanCmd])
         scanProc.start()
         scanProc.join(5)
         try:
             scanResult = queue.get(timeout=30)
             #print scanResult
         except Exception, e:
             print e
             print "get cmd result error: %s " % str(e)
             scanResult = -1
         scanProc.terminate()
         return scanResult
Example #13
    def __init__(self, processes=None, initializer=None, initargs=()):
        self._inqueue = SimpleQueue()
        self._outqueue = SimpleQueue()
        self._taskqueue = Queue.Queue()
        self._cache = {}
        self._state = RUN

        if processes is None:
            try:
                processes = processing.cpuCount()
            except NotImplementedError:
                processes = 1

        self._pool = [
            Process(target=worker,
                    args=(self._inqueue, self._outqueue, initializer,
                          initargs)) for i in range(processes)
        ]

        for i, w in enumerate(self._pool):
            w.setName('PoolWorker-' + ':'.join(map(str, w._identity)))
            w.start()

        self._task_handler = threading.Thread(
            target=Pool._handleTasks,
            args=(self._taskqueue, self._inqueue, self._outqueue, self._pool))
        self._task_handler.setDaemon(True)
        self._task_handler._state = RUN
        self._task_handler.start()

        self._result_handler = threading.Thread(target=Pool._handleResults,
                                                args=(self._outqueue,
                                                      self._cache))
        self._result_handler.setDaemon(True)
        self._result_handler._state = RUN
        self._result_handler.start()

        self._terminate = Finalize(
            self,
            Pool._terminatePool,
            args=(self._taskqueue, self._inqueue, self._outqueue, self._cache,
                  self._pool, self._task_handler, self._result_handler),
            exitpriority=5)
Example #14
def run_code():
    if not request.args:
        abort(400)
    pycode = request.args.get('code', '')
    if ("__class__" in pycode) or ("_module" in pycode):
        return jsonify("timed out! you have an infinite loop!")

    pysplit = pycode.splitlines()
    # print(pycode, file=sys.stderr)
    p = Process(target=exec, args=(pycode, myglobals))
    p.start()
    p.join(2)
    p.terminate()
    if p.exception:
        if p.exception == 1:
            return jsonify("no error!")
        tb = p.exception[1]
        if isinstance(p.exception[0], SyntaxError):
            return getSynTraceback(filename, pysplit, tb)
        return getTraceback(filename, pysplit, tb)
    return jsonify("timed out! you have an infinite loop!")
Example #15
def worker(inp, outp):
    for arg in iter(inp.get, 'STOP'):
        try:
            rez = dopcap(arg)
            os.unlink(arg)
        except Exception, e:
            print e
        print "Done: %s" % arg


task_queue = Queue()
done_queue = Queue()

for i in range(PROCS):
    Process(target=worker, args=(task_queue, done_queue)).start()


class CloseEvent(ProcessEvent):
    def process_IN_CLOSE_WRITE(self, event):
        task_queue.put("%s" %  os.path.join(event.path, event.name))
        print "Received: %s" % os.path.join(event.path, event.name)

wm = WatchManager()

notifier = Notifier(wm, CloseEvent())
wdd = wm.add_watch('/data2', pyinotify.IN_CLOSE_WRITE, rec=True)
while True:  # loop forever
    try:
        notifier.process_events()
        if notifier.check_events():
Example #16
def execInSandbox(fun, *args, **kwargs):
    from processing import Pipe, Process, Condition
    (r, w) = Pipe()

    # bind worker to the 'write' side of the pipe, and to 'fun'
    def worker(*args, **kwargs):
        # TODO: catch errors, and signal parent about them...
        try:
            w.send(fun(*args, **kwargs))
        except Exception, e:
            w.send(e)

    p = Process(target=worker, args=args, kwargs=kwargs)
    p.start()
    result = r.recv()
    if isinstance(result, Exception): raise result
    # check whether the child exited OK
    return result
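
A hypothetical call to execInSandbox(), showing the round trip through the pipe; square() is invented for the example:

def square(x):
    return x * x

print execInSandbox(square, 7)   # prints 49
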
Example #17
         #datetime(2004, 8,  4, 0, 0), 
         #datetime(2004, 8,  5, 0, 0), 
         #datetime(2004, 8,  6, 0, 0),
         
         #datetime(2004, 8,  9, 0, 0), 
         #datetime(2004, 8, 10, 0, 0), 
         #datetime(2004, 8, 11, 0, 0), 
         #datetime(2004, 8, 12, 0, 0), 
         #datetime(2004, 8, 13, 0, 0),
     ]
         
 # create Queues
 queue = BufferedQueue()
 result = Queue()
 
 p1 = Process(target=process_func, args=[queue, result])
 p2 = Process(target=process_func, args=[queue, result])
 p1.setStoppable(True); p2.setStoppable(True)
 p1.start(); p2.start()
 
 #strategy_args = random_strategies_generator_2(periods=(1,2,3,4,5), 
 #                                              strats_per_period=24)
 strategy_args = [
     (('long_tops', 4, 'low', 'HH', 6, 7), ('short_tops', 4, 'high', 'LL', 6, 7))
 ]
 
 for (s1, s2) in strategy_args:
     print s1, s2
 
 # create tasks for this batch
 tasks = {}
Example #18
            print "Process Number: %s didn't find a response for %s " % (i, ip)
            pass

def snmp_query(i,out):
    while True:
        time.sleep(.1)
        if out.empty():
            sys.exit()
            print "Process Number: %s" % i
        ipaddr = out.get()
        s = Snmp()
        h = HostRecord()
        h.ip = ipaddr
        h.snmp_response = s.query()
        print h
        return h

try:
     q.putmany(ips)
finally:
     for i in range(num_workers):
         p = Process(target=f, args=[i,q,oq])
         p.start()
     for i in range(num_workers):
         pp = Process(target=snmp_query, args=[i,oq])
         pp.start()
print "main process joins on queue"
p.join()
print "Main Program finished"

Example #19
        if out.empty():
            sys.exit()
            print "Process Number: %s" % i
        ipaddr = out.get()
        s = Snmp()
        h = HostRecord()
        h.ip = ipaddr
        h.snmp_response = s.query()
        print h
        return h
try:
    q.putmany(ips)

finally:
    for i in range(num_workers):
        p = Process(target=f, args=[i,q,oq])
        p.start()
    for i in range(num_workers):
        pp = Process(target=snmp_query, args=[i,oq])
        pp.start()

print "main process joins on queue"
p.join()
#while not oq.empty():
#    print "Validated", oq.get()


print "Main Program finished"


        # Check return value; if non-zero, inform the user
        if retVal == 0:
            mf.Cout("Process #%s is alive." % (iProcess))
        else:
            mf.Cout("Process #%s is not responding for IP Address %s." %
                    (iProcess, ip))


if __name__ == "__main__":
    mf.StopWatchStart()

    # Loop over all IP addresses
    for ip in ipAddresses:
        # Put an item into the queue
        queue.put(ip)

    # Loop over a given number of processes;
    for iProcess in range(nProcesses):
        # Create process
        p = Process(target=f, args=[iProcess, queue])
        # Start process
        p.start()

    mf.Cout("Main process joins on queue.")
    # Join the process so that all queued items are gotten and processed before the program exits.
    p.join()
    mf.Cout("Main program finished.")

    #timer.sleep(5)
    mf.StopWatchStop()
Example #21
            pass

            print "ok"


def snmp_query(i, out):
    while True:
        time.sleep(.1)
        if out.empty():
            sys.exit()
            print "Process Number: %s" % i
        ipaddr = out.get()
        s = Snmp()
        h = HostRecord()
        h.snmp_response = s.query()
        print h
        return h


try:
    q.put(ips)
finally:
    for i in range(num_workers):
        pp = Process(target=f, args=[i, q, oq])
        pp.start()

print "main process joins on queue"
pp.join()

print "Main Program finished"
Example #22
def main():
    """Start the Feedback Controller."""
    
    # Get Options
    description = """Feedback Controller"""
    usage = "%prog [Options]"
    version = """
Copyright (C) 2007-2010 Bastian Venthur <venthur at cs tu-berlin de>

Homepage: http://bbci.de/pyff

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
    parser = OptionParser(usage=usage, version=version, description=description)
    parser.add_option('-l', '--loglevel', type='choice', 
                      choices=['critical', 'error', 'warning', 'info', 'debug', 'notset'], 
                      dest='loglevel', 
                      help='Which loglevel to use for everything but the Feedbacks. Valid loglevels are: critical, error, warning, info, debug and notset. [default: warning]', 
                      metavar='LEVEL')
    parser.add_option('--fb-loglevel', type='choice', 
                      choices=['critical', 'error', 'warning', 'info', 'debug', 'notset'], 
                      dest='fbloglevel', 
                      help='Which loglevel to use for the Feedbacks. Valid loglevels are: critical, error, warning, info, debug and notset. [default: warning]', 
                      metavar='LEVEL')
    parser.add_option('-p', '--plugin', dest='plugin',
                      help="Optional Plugin, the Feedback Controller should inject into the Feedback.",
                      metavar="MODULE")
    parser.add_option('-a', '--additional-feedback-path', dest='fbpath',
                      help="Additional path to search for Feedbacks.",
                      metavar="DIR")
    parser.add_option('--port', dest='port',
                      help="Set the Parallel port address to use. Windows only. Should be in Hex (eg: 0x378)",
                      metavar="PORTNUM")
    parser.add_option("--nogui", action="store_true", default=False, 
                      help="Start without GUI.")

    options, args = parser.parse_args()

    # Initialize logging
    str2loglevel = {'critical' : logging.CRITICAL,
                    'error'    : logging.ERROR,
                    'warning'  : logging.WARNING,
                    'info'     : logging.INFO,
                    'debug'    : logging.DEBUG,
                    'notset'   : logging.NOTSET}
    
    loglevel = str2loglevel.get(options.loglevel, logging.WARNING)
    fbloglevel = str2loglevel.get(options.fbloglevel, logging.WARNING)

    logging.basicConfig(level=loglevel, format='[%(process)-5d:%(threadName)-10s] %(name)-25s: %(levelname)-8s %(message)s')
    logging.info('Logger initialized with level %s.' % options.loglevel)
    logging.getLogger("FB").setLevel(fbloglevel)
    
    # get the rest
    plugin = options.plugin
    fbpath = options.fbpath
    guiproc = None
    if not options.nogui:
        guiproc = Process(target=GUI.main)
        guiproc.start() 
        
    port = None
    if options.port != None:
        port = int(options.port, 16)
    try:
        fc = FeedbackController(plugin, fbpath, port)
        fc.start()
    except (KeyboardInterrupt, SystemExit):
        logging.debug("Caught keyboard interrupt or system exit; quitting")
    except:
        logging.error("Caught an exception, quitting FeedbackController.")
        print traceback.format_exc()
    finally:
        print "Stopping FeedbackController...",
        fc.stop()
        if guiproc:
            guiproc.terminate()
        print "Done."
Example #23
# from urllib2 import urlopen
# api_key = 2e714d06ee6612e6e746d6abd9f3b7a9
# weather = urlopen('http://api.openweathermap.org/data/2.5/weather?lat=')
	
# import required functions
from AndyPi_LCD import AndyPi_LCD
from processing import Process
import time
import feedparser
	
if __name__ == '__main__':
	# initial check for latest rss feed
	msg=feedparser.parse('http://feeds.bbci.co.uk/news/rss.xml?edition=uk').entries[0].title
	lcd=AndyPi_LCD()  # set name of imported class
	lcd.lcd_init()    # initialise LCD
	lcd.led(512)      # turn backlight fully on
	
	while True:
		# set up a new process in which to run the lcd.scroll_clock function with the correct arguments
		p = Process(target=lcd.scroll_clock, args=(1, "c", 0.3, msg))
		# start the process
		p.start()
		# wait for 30 seconds (or however long you wish to wait between checking updates)
		time.sleep(30.0)
		# while the python is scrolling the LCD message in the 'p' process
		# check for new rss feed, and put in variable "msg"
		msg=feedparser.parse('http://feeds.bbci.co.uk/news/rss.xml?edition=uk').entries[0].title
		# stop the scroller process
		p.terminate()

Example #24
# All other required modules here
from processing import Process, Queue
import time

def f(queue):
    x = queue.get()
    mf.Cout("Process number %s, sleeps for %s seconds" % (x,x))
    time.sleep(x)
    mf.Cout("Process number %s finished" % (x))
    
if __name__ == "__main__":
    mf.StopWatchStart()
    # Create a queue object 
    queue = Queue()
    # Create 10 processes
    for i in range(10):
        # Put an item into the queue; 10 items in total
        queue.put(i)
        # Declare the process
        mf.Cout("Creating process #%s" % (i))
        i = Process(target = f, args=[queue]) #for a Thread: threading.Thread(target = f, args=(queue)) => Similar structure
        # Start the process
        i.start()

    mf.Cout("Main process joins on queue")
    # Block until the last started process has finished (only the last process is joined here)
    i.join()
    mf.Cout("Main program finished")

    mf.StopWatchStop()
Example #25
        else:
            print "Process Number: %s didn't find a response for %s" % (i,ip)
            pass

            print  "ok"
def snmp_query(i,out):
    while True:
        time.sleep(.1)
        if out.empty():
            sys.exit()
            print "Process Number: %s" % i
        ipaddr = out.get()
        s = Snmp()
        h = HostRecord()
        h.snmp_response = s.query()
        print h
        return h

try:
    q.put(ips)
finally:
    for i in range(num_workers):
        pp = Process(target=f,args=[i,q,oq])
        pp.start()

print "main process joins on queue"
pp.join()

print "Main Program finished"
           
Example #26
def main():
    token = 'YOUR TOKEN'
    server_join = requests.get(
        'https://api.vk.com/method/groups.getLongPollServer',
        params={
            'access_token': token,
            'v': '5.101',
            'group_id': 'ID GROUP'
        }).json()['response']

    while True:
        server = requests.get(
            '{server}?act=a_check&key={key}&ts={ts}&wait=25'.format(
                server=server_join['server'],
                key=server_join['key'],
                ts=server_join['ts'])).json()
        updates = server['updates']

        for new in updates:
            obj = new['object']
            if new['type'] == "message_new":
                from_ids = obj['from_id']
                peer_id = obj['peer_id']
                texts = obj["text"]
                text_rm = 0
                if "reply_message" in obj:
                    reply_message = obj['reply_message']
                    print(reply_message)
                    text_rm = reply_message['text']
                    id_rm = reply_message['from_id']

                    param = {
                        'access_token': token,
                        'v': '5.101',
                        'user_id': int(id_rm),
                        'fields': 'photo_id, photo_50'
                    }
                    repl = requests.get('https://api.vk.com/method/users.get',
                                        params=param).json()['response'][0]
                    reply_photo = repl['photo_50']
                    reply_name = repl['first_name'] + " " + repl['last_name']

                elif 'fwd_messages' in obj and obj['fwd_messages']:
                    reply_message = obj['fwd_messages'][0]
                    print("REPLY = " + str(reply_message))
                    text_rm = reply_message['text']
                    id_rm = reply_message['from_id']
                    param = {
                        'access_token': token,
                        'v': '5.101',
                        'user_id': int(id_rm),
                        'fields': 'photo_id, photo_50'
                    }
                    repl = requests.get('https://api.vk.com/method/users.get',
                                        params=param).json()['response'][0]
                    reply_photo = repl['photo_50']
                    reply_name = repl['first_name'] + " " + repl['last_name']

                if '!quote' in str(texts).lower() and text_rm != 0:
                    url = reply_photo[:-6]  # type: Any
                    reply_photo = url[-7:]
                    myUrl = str(url)
                    myFile = "image\\" + str(myUrl[-7:])
                    request.urlretrieve(myUrl, myFile)
                    photo_obrabotka = Process(reply_photo, text_rm, reply_name)
                    adress = photo_obrabotka.paint_text()
                    print(adress)
                    name_photo = url[-7:]

                    param = {
                        'access_token': token,
                        'v': '5.101',
                    }
                    file = {'photo': open(adress, 'rb')}
                    upload1 = requests.get(
                        'https://api.vk.com/method/photos.getMessagesUploadServer',
                        params=param,
                    ).json()['response']
                    urls = upload1['upload_url']
                    upload2 = requests.post(urls, files=file).json()
                    upload3 = requests.get(
                        'https://api.vk.com/method/photos.saveMessagesPhoto',
                        params={
                            'access_token': token,
                            'v': '5.101',
                            'photo': upload2['photo'],
                            'server': upload2['server'],
                            'hash': upload2['hash']
                        }).json()['response'][0]
                    upload4 = 'photo{}_{}'.format(upload3['owner_id'],
                                                  upload3['id'])
                    param = {
                        'access_token': token,
                        'v': '5.101',
                        'peer_ids': peer_id,
                        'attachment': upload4,
                        'random_id': 0
                    }
                    self_messege = requests.get(
                        'https://api.vk.com/method/messages.send',
                        params=param).json()

            server_join['ts'] = server['ts']
Example #27
#!/usr/bin/env python
# -*- coding: utf-8 -*- 

from processing import Process, Queue
import time


def f(q):
    x = q.get()
    print "Process number %s, sleeps for %s seconds" % (x, x)
    time.sleep(x)
    print "Process number %s finished" % x


q = Queue()

for i in range(10):
    q.put(i)
    i = Process(target=f, args=[q])
    i.start()

print "main process joins on queue"
i.join()


print "Main Program finished"

Example #28
def algo(request, algo):
    text = ""
    type = ""
    algo_object = get_object_or_404(Algo, shortTitle=algo)
    manual = get_object_or_404(ManPage, algo=algo_object)
    
    embedFormDict = { 'cpt' : CPTEmbedForm,
                   'f5' : F5EmbedForm,
                   'lsb' : LsbEmbedForm,
                   'gifshuffle' : GifShuffleEmbedForm,
                   'bs' : BattlestegEmbedForm, }
    
    extractFormDict = { 'cpt' : CPTExtractForm,
                   'f5' : F5ExtractForm,
                   'lsb' : LsbExtractForm,
                   'gifshuffle' : GifShuffleExtractForm,
                   'bs' : BattlestegExtractForm, }
    typeDict = { 'cpt' : "png",
               'f5' : "jpeg",
               'lsb' : "png",
               'gifshuffle' : "gif",
               'bs' : "png", }
    
    if request.method == 'POST':
        
        q = Queue()
        # embedding
        if "submit1" in request.POST:
            
            algoDict = { 'cpt' : cptEmbed,
                           'f5' : f5Embed,
                           'lsb' : lsbEmbed,
                           'gifshuffle' : gifShuffleEmbed,
                           'bs' : bsEmbed, }
            
            embedForm = embedFormDict[algo](request.POST, request.FILES)
            extractForm = extractFormDict[algo]()
            type = typeDict[algo]
            p = Process(target=algoDict[algo], args=(q, ))

            # fork process to embed
            if embedForm.is_valid():
                p.start()
                q.put([request.POST, request.FILES['file'].temporary_file_path()])
                os.system("sleep 1")
                try:
                    retval = q.get(True, 10)
                except Q.Empty:
                    retval = -2
                p.join()
                if retval == -1:
                    text += "%s-Datei nicht gefunden oder fehlerhaft."%(type)
                elif retval == -2:
                    text += "Fehler beim Einbetten. Anderes Bild oder andere Parameter versuchen."
                else:
                    return createResponse(retval, type)
        # extracting        
        elif "submit2" in request.POST:
             
            algoDict = { 'cpt' : cptExtract,
                           'f5' : f5Extract,
                           'lsb' : lsbExtract,
                           'gifshuffle' : gifShuffleExtract,
                           'bs' : bsExtract, }

            embedForm = embedFormDict[algo]()
            extractForm = extractFormDict[algo](request.POST, request.FILES)
            type = typeDict[algo]
            p = Process(target=algoDict[algo], args=(q, ))
            
            # fork process to extract
            if extractForm.is_valid():
                p.start()
                q.put([request.POST, request.FILES['file'].temporary_file_path()])
                try:
                    retval = q.get(True, 10)
                except Q.Empty:
                    retval = -2
                p.join()
                if retval == -1:
                    text += "%s-Datei nicht gefunden oder fehlerhaft."%(type)
                elif retval == -2:
                    text += "Fehler beim Ausbetten. Anderes Bild oder andere Parameter versuchen."
                else:
                    #print retval
                    text += retval
    # empty form
    else:
        embedForm = embedFormDict[algo]()
        extractForm = extractFormDict[algo]()
    # render
    return render_to_response("stego_algo.html", {'algo' : algo_object,
                                            'embedForm' : embedForm, 
                                            'extractForm' : extractForm,
                                            'text' : text,
                                            'algo_type' : 'Steganographie',
                                            'manual' : manual,}) 
Example #29
 def __init__(self, service, db_conn_string):
     self.service = service
     self.db_conn_string = db_conn_string
     Process.__init__(self)
Example #30
 def rpc_processTest(self, **kwargs):
     self.app.setThermo('build_pdf', 0, 'Preparo elaborazione', 10, command='init')
     p = Process(target=self.testOtherProcess, args=(self.pageLocalDocument('testOtherProcess'),), name='pippo')
     p.start()
Example #31
 start = 0
 end = 0
 for i in range(PLATFORMDIVISION):
     start_index.append(start)
     end = start + bids_number_pertime
     if bids_number_reminder > 0:
         end += 1
         bids_number_reminder -= 1
     end_index.append(end)
     start = end
 cur_db_list = getCursors(conn_db, n=PLATFORMDIVISION)
 print cur_db_list
 # whitelist of duplicate platform ids
 platform_id_white_list = getPlatformIdList("repeatwhiteplatform_id.txt")
 for i in range(PLATFORMDIVISION):
     j = Process(target=readSQLReturn,
                 args=[[start_index[i], end_index[i], cur_db_list[i]]])
     j.start()
 j.join()
 print "共有" + str(bids_number) + "个标."
 stringSQL = "SELECT count(*) FROM " + dstdb_info
 cur_db.execute(stringSQL)
 good_number = cur_db.fetchone()[0]
 print "在满额限制为" + str(FULLBIDPERCENT) + "的情况下, 只有" + "%.2f" % (
     100 * (float(good_number) / bids_number)) + "%的数据是可用的."
 #changeValue("./clean_date.xml","clean_date_lasttime",clean_date_thistime)
 fp.close()
 # write all platform_ids out to a file
 fp = open("platform_id_list_info.txt", "w")
 for platform_id in platform_id_set:
     fp.write(str(platform_id))
     fp.write("\n")
 fp.close()
Example #33
ips = IP("10.0.1.0/24")


def f(i, q):
    while True:
        if q.empty():
            sys.exit()
        print "Process Number: %s" % i
        ip = q.get()
        ret = subprocess.call("ping -c 1 %s" % ip,
                              shell=True,
                              stdout=open('/dev/null', 'w'),
                              stderr=subprocess.STDOUT)
        if ret == 0:
            print "%s: is alive" % ip
        else:
            print "Process Number: %s didn't find a response for %s" % (i, ip)


for ip in ips:
    q.put(ip)
#q.put("192.168.1.1")

for i in range(50):
    p = Process(target=f, args=[i, q])
    p.start()

print "main process joins on queue"
p.join()
print "Main Program finished"
Example #34
 def __init__(self, service, db_conn_string):
     self.service = service
     self.db_conn_string = db_conn_string
     Process.__init__(self)
Example #35
from csc.conceptnet4.models import RawAssertion, Assertion, SurfaceForm,\
Frame, Concept, Frequency, Relation, Batch
from csc.corpus.models import Language, Sentence
from csc.nl.models import FunctionClass, FunctionWord
from events.models import Event, Activity
from voting.models import Vote
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
if __name__ == '__main__':
    serializer = serializers.get_serializer("myyaml")()
    from csc.conceptnet4.models import RawAssertion, Assertion, SurfaceForm,\
    Frame, Concept, Frequency, Relation, Batch
    from csc.corpus.models import Language, Sentence
    from csc.nl.models import FunctionClass, FunctionWord
    from events.models import Event, Activity
    from voting.models import Vote
    from django.contrib.auth.models import User
    from django.contrib.contenttypes.models import ContentType
    
    classes = [Vote, RawAssertion, Frame, SurfaceForm, Assertion,
    Relation, Frequency, Concept, FunctionClass, FunctionWord, Language, 
    Sentence, User, ContentType, Activity, Batch]
    
    classes1 = classes[0:5]
    classes2 = classes[5:10]
    classes3 = classes[10:15]
    classes4 = classes[15:]

    for working_set in (classes1, classes2, classes3, classes4):
        proc = Process(target=worker, args=[working_set])
        proc.start()
Example #36
                "task_id": task_id,
            }
            tasks[task_id] = task
            strategy_id_task_map[strategy_id].append(task_id)
            task_id += 1
        strategy_results[strategy_id] = 0
        strategy_id += 1

    # create Queues
    queue = BufferedQueue()
    result = Queue()
    queue.putmany(tasks.values())
    # create 2 Processes
    processes = []
    for i in range(2):
        p = Process(target=process_func, args=[queue, result])
        p.setStoppable(True)
        p.start()
        processes.append(p)

    wanted = 10
    criterium = nr_days * ticker_details["increment"] * 3  # 'arbitrary' criterium
    got = set()

    for i in range(len(tasks)):
        result_dict = result.get()
        for task_id, report in result_dict.items():
            tsk = tasks.get(task_id)
            tsk.update({"report": report})
            strategy_id = tsk["strategy_id"]
            for order_id, order in report.items():