Example no. 1
def main():
    c = 0
    a = threading.Thread(target=work1)
    b = threading.Thread(target=work2)
    e = multiprocessing.Process(target=work1)
    d = multiprocessing.Process(target=work2)
    e.start()
Example no. 2
def process_wrapper(fname, chunkStart, chunkSize, outfile="b_output.txt"):
    with open(fname) as f:
        f.seek(chunkStart)
        lines = f.read(chunkSize).splitlines()
    # open the output file once instead of reopening (and shadowing) it per line
    with open(outfile, 'a') as out:
        for line in lines:
            process(line)
            out.write(line + '\n')  # Python converts '\n' to os.linesep in text mode
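Example no. 3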
def main():
    quant = [10000, 20000, 50000, 100000]

    # Process.start() returns None, so the original chained calls cannot work;
    # start and join each process explicitly (still one at a time, as before)
    for q in quant:
        p = Process(target=calcula, args=(q, ))
        p.start()
        p.join()

    desenhaGrafico(quant, graf_tempo, "Tamanho", "Tempo", "saida_time")
    desenhaGrafico(quant, graf_operacoes, "Tamanho", "Operações",
                   "saida_operacoes")
Example no. 4
    def generate_multisig(self, mon, sf=0, poolsize=8):
        m, n = mon
        n += sf
        if not self.usingbip:
            poolsize = poolsize if (n - sf) >= poolsize else (n - sf)

            main_path = self.showpath(self.path)

            info = partial(self.info_multisig, main_path=main_path)

            if poolsize > 1:
                with Pool(poolsize) as pool:
                    result = pool.map(info, range(sf, n))
            else:
                result = list(map(info, range(sf, n)))

            path = main_path + "{sf}~{n}".format(sf=sf, n=n)
            key = [r[0] for r in result]
            monscript = MoNscript(m, n, key)
            ins = self.custom_addr_type(monscript, testnet=self.testnet)
            address = ins.address
            redeemscript = ins.redeemscript

            results = [path] + list(
                zip(*result)) + [address, monscript, redeemscript]

            return results

        raise ParameterError(
            "bip44/bip49/bip84 should not be used to create custom address.",
            self.bip, "m/4'")
Example no. 5
def filecz(name=None):
    # 'name' is unused; it only lets the Process/Pool calls below pass an index
    f = None  # guard so the finally block cannot raise NameError if open() fails
    try:
        f = open(r'')
        print(f.read())
    finally:
        if f:
            f.close()

    with open(r'', 'r') as filereader:
        print(filereader.read())
        # read() has already consumed the file, so readlines() returns an empty list here
        for line in filereader.readlines():
            print(line.strip())

    print(os.getpid())
    if __name__ == '__main__':
        print(os.getpid())
        for i in range(5):
            p = Process(target=filecz, args=(str(i), ))
            p.start()
        p.join()
    time.sleep(random.random() * 3)
    if __name__ == '__main__':
        p = Pool(processes=3)
        for i in range(5):
            p.apply_async(filecz, args=(i, ))
        p.close()
        p.join()
Example no. 6
    def main_run(self):
        while True:
            new_socket, client_addr = self.tcp_server_socket.accept()
            # handle each client in its own process
            p = multiprocessing.Process(target=self.service_client, args=(new_socket,))
            p.start()
            # the parent keeps no reference to the accepted socket
            new_socket.close()
        self.tcp_server_socket.close()
Example no. 7
    def to_importmulti(self, n=1, sf=0, poolsize=8, mon=None):

        __format = OrderedDict({
            'scriptPubKey': {
                "address": ""
            },
            # "witnessscript": "", multisig and witness
            "label": "",
            "timestamp": "now",
            "pubkeys": [],
            # "redeemscript": "",  needed for P2SH
            "keys": [],  # private keys
            "watchonly": False
        })

        lx = []

        if not mon:
            n += sf
            poolsize = poolsize if (n - sf) >= poolsize else (n - sf)

            main_path = self.showpath(self.path)

            info = partial(self.info, main_path=main_path, extra=True)
            if poolsize > 1:
                with Pool(poolsize) as pool:
                    results = pool.map(info, range(sf, n))
            else:
                results = list(map(info, range(sf, n)))

            for result in results:
                # build a fresh entry per address; appending and mutating __format
                # itself would leave every element of lx pointing at the same dict
                entry = OrderedDict(__format)
                entry["scriptPubKey"] = {"address": result[1]}
                entry["label"] = result[0].replace("'", "h")
                entry["pubkeys"] = [result[2]]
                entry["keys"] = [result[3]]
                if result[4]:
                    # P2WPKH-P2SH
                    entry["redeemscript"] = result[4]
                lx.append(entry)

            return lx, json.dumps(lx)  # original data, importmulti format

        result = self.generate_multisig(mon=mon, poolsize=poolsize, sf=sf)
        __format["scriptPubKey"]["address"] = result[3]
        __format["label"] = result[0].replace("'", "h")
        __format["pubkeys"] = result[1]
        __format["keys"] = result[2]
        if self.custom_addr_type != P2SH:
            # P2WSH / P2WSH-P2SH
            if result[5]:
                __format["redeemscript"] = result[5]

            __format["witnessscript"] = result[4]
        else:
            # P2SH
            __format["redeemscript"] = result[4]

        return __format, json.dumps([__format])
Example no. 8
    def login(self):
        if not self.__cookieIsValid:
            self.__createQrcode(self.__getQrcode())
            showQrcodeProcess = Process(target=self.showQrcode)
            showQrcodeProcess.start()
            while not self.__checkQrcode():
                sleep(1)
            self.__closeQrcode(showQrcodeProcess.pid)
            self.__testTicket()
Example no. 9
    def wrapped(*args, **kwargs):
        q = multiprocessing.Queue(3)  # bounded queue shared with the worker process
        proc = multiprocessing.Process(target=main, args=(q, args, kwargs))
        proc.start()
        try:
            while True:
                item = q.get()
                yield item
        finally:
            proc.terminate()
            proc.join()
Example no. 10
def run():
    core_num = mp.cpu_count()
    bucket_size = (len(urls) // core_num) + 1
    reward_lst = mp.Manager().list()
    jobs = []
    for i in range(core_num):
        url_bucket = urls[i * bucket_size:(i + 1) * bucket_size]
        p = mp.Process(target=fetch, args=(
            url_bucket,
            reward_lst,
        ))
        p.start()
        jobs.append(p)
    for p in jobs:
        p.join()
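Example no. 11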
def run_locusts_with_processes(sys_argv, processes_count):
    processes = []
    manager = multiprocessing.Manager()

    for _ in range(processes_count):
        p_slave = multiprocessing.Process(target=start_slave, args=(sys_argv,))
        p_slave.daemon = True
        p_slave.start()
        processes.append(p_slave)

    try:
        if "--slave" in sys_argv:
            for process in processes:
                process.join()
        else:
            start_master(sys_argv)
    except KeyboardInterrupt:
        manager.shutdown()
Example no. 12
    def generate(self, n=1, sf=0, poolsize=8, raw=True):
        n += sf
        poolsize = poolsize if (n - sf) >= poolsize else (n - sf)
        if raw:
            # if show_extend_key is set, the caller wants the detailed output, so disable raw mode
            raw = not self.show_extend_key
        main_path = self.showpath(self.path)

        info = partial(self.info, main_path=main_path)
        if poolsize > 1:
            with Pool(poolsize) as pool:
                result = pool.map(info, range(sf, n))
        else:
            result = list(map(info, range(sf, n)))

        if raw:
            return result

        return self.details(addr=result)
Example no. 13
def main():
    processes = []
    number_list = [x for x in range(1, 100000001)]
    result_queue = Queue()
    index = 0
    # start 8 processes, each operating on its own slice of the data
    for _ in range(8):
        p = Process(target=task_handler, args=(number_list[index:index + 12500000], result_queue))
        index += 12500000
        processes.append(p)
        p.start()
    # record the time spent waiting for all processes to finish
    start = time()
    for p in processes:
        p.join()
    # merge the partial results
    total = 0
    while not result_queue.empty():
        total += result_queue.get()
    print(total)
    end = time()
    print('Execution time: ', (end - start), 's', sep='')
Example no. 14
def run():
    ctx = parse_cmd_line()
    # h_pcap = None
    #dev_name = ctx.dev_name.encode('utf-8')
    #snap_len = ctx.snap_len
    #promisc = ctx.promiscuous
    #timeout = ctx.timeout
    #err_buf = ctypes.create_string_buffer(PCAP_ERR_BUF_SZ)

    # nodes:
    #   pcap input
    #   ethernet parser
    #   output for mac addresses
    #   output for ethernet flows
    # datastructures
    #   packet table -- memcached entries
    #   mac address table -- redis
    #   ethernet flow table -- redis
    # datastores
    #   mac 
    # pcap input gets packet from device
    # pcap input copies packet to packet table
    # pcap input passes packet table packet ref (index of packet in table) to parser
    # parser gets packet ref from pcap input
    # parser parses ethernet headers: mac addresses and ethernet flows
    # parser

    int_name = "Ethernet"
    
    pcap_input_proc = multiprocessing.Process(target=pcap_input.run,
                                              args=(int_name,))

    # parsing process

    
    

        #print("pkt str: \"{}\", len: {}".format(pkt_str, pkt_len))
    print_mac_addresses(mac_addresses)
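The comment block in this example sketches a small pipeline: a pcap-input stage that captures packets and a parser stage that extracts addresses and flows. As a rough editorial illustration only (capture(), parse() and the fake packets below are not from the original source), two such stages can be wired together with multiprocessing queues:

import multiprocessing

def capture(out_q):
    # stand-in for the pcap input node: push a few fake packets, then a sentinel
    for i in range(3):
        out_q.put(b"packet-%d" % i)
    out_q.put(None)

def parse(in_q, result_q):
    # stand-in for the ethernet parser node: transform each packet and forward it
    while True:
        pkt = in_q.get()
        if pkt is None:
            break
        result_q.put(pkt.upper())
    result_q.put(None)

if __name__ == "__main__":
    pkt_q = multiprocessing.Queue()
    res_q = multiprocessing.Queue()
    stages = [multiprocessing.Process(target=capture, args=(pkt_q,)),
              multiprocessing.Process(target=parse, args=(pkt_q, res_q))]
    for p in stages:
        p.start()
    while True:
        item = res_q.get()
        if item is None:
            break
        print(item)
    for p in stages:
        p.join()

The None sentinels mark end-of-stream on each queue, which keeps the consumer loops simple and lets every process exit cleanly before join().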
Example no. 15
    print('taking an afternoon nap')
    time.sleep(2)
    print('sleeping for the night')
    time.sleep(3)


def th3():
    print(os.getppid(), '----', os.getpid())
    print('beating Doudou')
    time.sleep(2)
    print('beating Doudou')
    time.sleep(2)


things = [th1, th2, th3]
process = []

for th in things:
    p = mp.Process(target=th)
    p.daemon = True
    process.append(p)
    p.start()
# for p in process:
# 	p.join()

print('++++++++++++ parent process +++++++++++++++')

th1()
th2()
th3()
Example no. 16
     data_train = var_init_mean_adder(df=data_train.copy(), 
                                      columns=sensors_list)
     data_test = var_init_mean_adder(df=data_test.copy(), 
                                     columns=sensors_list)
     
     # denoising
     # choose denoising algorithms
     denoising_algos = ['none', 'savgol', 'lwss', 'kalman']
     # multiprocessing: start a process and call denoise_data function for each 
     # denoising algorithm
     processes = []
     for i in denoising_algos:
         prc_kwargs = {'data_train':data_train.copy(), 
                       'data_test':data_test.copy(),  
                       'sensors_list':sensors_list, 'dm':i}
         p = Process(target=denoise_data, kwargs=prc_kwargs)
         processes.append(p)
         p.start()
     for prc in processes:
         prc.join()
     # store test_rul to cwd
     np.save('test_rul', test_rul)
 
     
 # check if mode passed as system argument requires estimation
 if 'all' in sys.argv or 'estimation' in sys.argv:
     # define simulation results class
     class SimResult():
         
         __slots__ = ('predictions', 'truevals', 'res_df', 'train_mode')
         
Example no. 17
'''
Created on 2017-03-08

@author: Administrator
'''
import multiprocessing


def hello(name):
    '''say hello'''
    print('hello %s' % name)
    return


if __name__ == '__main__':
    p1 = multiprocessing.Process(target=hello, args=('joy', ))
    p1.start()
Example no. 18
import requests
import login
import functions
import multiprocessing


def func(num):
    print(num)


if __name__ == '__main__':
    #s = requests.session()
    #headers = {'Authorization': 'Token' + login.login(s)}
    lst = []
    for i in range(1, 10):
        lst.append(multiprocessing.Process(target=func, args=(i, )))
Example no. 19
from TweetQuotes import Tweets
from ReplyToMentions import MentionsReply
import time
import multiprocessing


def Tweet():
    Tweets()


def ReplyMentions():
    MentionsReply()


p1 = multiprocessing.Process(target=Tweet)
p2 = multiprocessing.Process(target=ReplyMentions)

p1.start()
p2.start()
p1.join()
p2.join()
Example no. 20
import os
from multiprocessing import Process

def run_proc(name):
    print('child process %s(%s) running...' % (name, os.getpid()))

if __name__ == "__main__":
    print('Parent process %s.' % os.getpid())
    for i in range(5):
        p = Process(target=run_proc, args=(str(i),))
        print('process will start.')
        p.start()
    p.join()  # note: only the last process started is joined here
    print('process end.')
Example no. 21
import time
import multiprocessing
result = []


def square(numbers):
    global result
    for i in numbers:

        result.append(i * i)
    print('result:' + str(result))


if __name__ == "__main__":

    arr = [1, 2, 3]

    p1 = multiprocessing.Process(target=square, args=(arr, ))
    p1.start()
    p1.join()
    # prints an empty list here: the child has its own copy of the global,
    # so appends made in square() are not visible to the parent
    print("result: " + str(result))
Example no. 22
#coding:utf-8

'''
Created on 2016-07-21

@author: xiongqiao
'''


import multiprocessing

def worker(num):
    print('Worker: {num}'.format(num=num))


if __name__ == '__main__':
    jobs = []
    for i in range(5):
        p = multiprocessing.Process(target=worker, args=(i, ))
        jobs.append(p)
        p.start()
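Example no. 23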
    def __agent_start(self):
        # pass the target and its arguments separately; calling the method here
        # would run the simulation in the parent instead of in a child process
        pro = Process(target=self.__agent_simulation,
                      args=(self.que, self.targetip, self.targetport, self.localip))
        pro.daemon = True
        pro.start()
        pro.join()
        return pro
Example no. 24
        yield i / 100.0
        i += 1


if __name__ == "__main__":
    f = open(FILENAME, "wb")
    f.write(SLOTSIZE * SLOTS * b'\0')
    f.close()
    f = open(FILENAME, "r+b")
    mapf = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_WRITE)

    ptbl = []
    for slot in range(SLOTS):
        offset = slot * SLOTSIZE
        mapf[offset] = EMPTY
        p = mp.Process(target=run, args=(slot, ))
        ptbl.append(p)
        print("Starting", p)
        p.start()

    numseq = numbers()
    b = next(numseq)
    c = next(numseq)
    for i in range(4):
        for slot in range(SLOTS):
            a, b, c = b, c, next(numseq)
            offset = slot * SLOTSIZE
            while mapf[offset] != EMPTY:
                time.sleep(0.01)
            mapf[offset + 1:offset + SLOTSIZE] = packslot(
                slot, "*******", a, b, c)[1:]
Example no. 25
def main():
    print('{} [SERVER]: Server running on port {}'.format(
        helper.timestamp(), PORT))

    # Create Server (socket) and bind it to a address/port.
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.bind((HOST, PORT))
    server.listen(MAX_CLIENTS)
    print('{} [SERVER]: Listening for (client) sockets'.format(
        helper.timestamp()))

    while True:
        # Accept client connection
        client, address = server.accept()
        client.settimeout(5.0)

        if helper.isBlacklisted(
                address[0]) or helper.getClientCount() == MAX_CLIENTS:
            # Close connection since they are blacklisted or
            # there are already the max number of clients connected.
            print('{} [SERVER]: Blacklisted IP {} attempted to connect'.format(
                helper.timestamp(), address[0]))
            client.close()
            continue  # Wait for next client

        # Only continue with this client if they send a confirmation message.
        # This is sort of a second handshake before the client establishes a video stream to server.
        try:
            confirmation = client.recv(
                1024).decode()  # Client should be sending confirmation
        except (socket.timeout, UnicodeDecodeError):
            # Client did not send decodable confirmation in time. Add them to the blacklist if they are unrecognized.
            if not helper.isWhitelisted(address[0]):
                helper.addToBlackList(address[0])
                print(
                    '{} [SERVER]: IP {} has been blacklisted for failing to confirm the connection'
                    .format(helper.timestamp(), address[0]))
                client.close()
                continue  # Wait for next client

        # Whitelist client IP, since they connected successfully.
        helper.addToWhiteList(address[0])

        # Begin a process for this client's video stream
        helper.updateClientCount(helper.getClientCount() + 1)
        print('{} [SERVER]: Socket {} connected as client {}'.format(
            helper.timestamp(), address, helper.getClientCount()))

        # Create, save, and start process (camera stream)
        p = Process(target=stream_camera,
                    args=(
                        client,
                        address,
                        helper.getClientCount(),
                    ))
        PROCESSES.append(p)
        p.start()

    # Clear PROCESSES
    for p in PROCESSES:
        p.join()  # Join all PROCESSES

    server.close()  # close the listening server socket
Example no. 26
###############################################   Text FILE    ###############################################


###############################################   Multiprocess    ###############################################
def begin():
    GAN = Trainer()
    GAN.Train()
###############################################   Multiprocess    ###############################################

if __name__ == '__main__':
    t1 = Loss_Log_Thread("Thread 1", 12)
    t1.start()
    t2 = ATHEN_CSV_Thread("Thread 2", 12)
    t2.start()
    # pass the function itself instead of calling it, and actually start the process
    p = Process(target=begin)
    p.start()
    t1.join()
    t2.join()







#import _thread
#import threading

# def print_epoch(name_of_thread, delay):
#         Trainer.Train()
#         time.sleep(delay)
Example no. 27
        value = 0
        cur2 = conn2.execute('insert into log (idValue) values(%s)' %
                             value)  #initialize the table with the idValue 0
    cur2 = conn2.cursor()

    portList = getAllPorts()
    port = portList[0]
    for i in portList:
        ser = serial.Serial(i, baudrate=115200, timeout=1)
        msg = ser.read(16)
        # ser.read() returns bytes, so search for byte strings
        p1 = msg.find(b'T')
        p2 = msg.find(b'D')
        if p2 - p1 > 5 and msg.find(b'=') != -1:
            port = i

    ser = serial.Serial(port, baudrate=115200, timeout=1)

    sendProcess = mp.Process(target=sendData, args=(cur, ))
    sendProcess.daemon = True
    sendProcess.start()

    readProcess = mp.Process(target=readData, args=(ser, ))
    readProcess.daemon = True
    readProcess.start()

    duojiProcess = mp.Process(target=duoji.setDirection,
                              args=(addirection, sleeptime))
    duojiProcess.daemon = True
    duojiProcess.start()

    main()
Example no. 28
from multiprocessing import Process


def hola():
    print("Hola")


def multi():
    print("multi")


def adios():
    print("adios")


if __name__ == "__main__":
    p1 = Process(target=hola)
    p2 = Process(target=multi)
    p1.start()
    p2.start()
    p1.join()
    p2.join()
Example no. 29
#!/usr/bin/python

import multiprocessing

def now(seconds):
    from datetime import datetime
    from time import sleep
    sleep(seconds)
    print('wait',seconds,'seconds, time is ',datetime.utcnow())
    
if __name__ == '__main__':
    import random
    for n in range(3):
        seconds = random.random()
        proc = multiprocessing.Process(target=now, args=(seconds,))
        proc.start()  # start() must be called, not just referenced
Example no. 30
def numberOfCPUs(no_of_cpu):
    if no_of_cpu == 2:
        p1 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[0], "1"])
        p2 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[1], "2"])

        p1.start()
        p2.start()

        p1.join()
        p2.join()
        print("Cracking has been completed")

    elif no_of_cpu == 4:
        p1 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[0], "1"])
        p2 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[1], "2"])
        p3 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[2], "3"])
        p4 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[3], "4"])

        p1.start()
        p2.start()
        p3.start()
        p4.start()

        p1.join()  # waits until the process is completed
        p2.join()
        p3.join()
        p4.join()
        print("Cracking has been completed")

    elif no_of_cpu == 6:
        p1 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[0], "1"])
        p2 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[1], "2"])
        p3 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[2], "3"])
        p4 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[3], "4"])
        p5 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[4], "5"])
        p6 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[5], "6"])

        p1.start()
        p2.start()
        p3.start()
        p4.start()
        p5.start()
        p6.start()

        p1.join()  # waits until the process is completed
        p2.join()
        p3.join()
        p4.join()
        p5.join()
        p6.join()
        print("Cracking has been completed")

    else:
        print(
            "Error message: You have {} CPU(s). This code has been constructed for either 2, 4 or 6 CPUs."
            .format(no_of_cpu))
        print(
            "How to fix: Go to lines 52-77. I have hardcoded the number of processors to run this. You'll just have to change the if-else statement to match your number of CPUs."
        )

    # get the list of words from the file
    def list_of_words_from_file(filename):
        print("Opening file: {}".format(filename))
        list_of_words = open(filename, 'r', errors='ignore').readlines()
        print("Stripping breaklines from file: {}".format(filename))
        list_of_words = list(map(str.strip, list_of_words))
        return (list_of_words)

    # get words from the file
    words = list_of_words_from_file(english_password_list)
    # create hash
    create_hash_md5_text_file(words, hashed_words_file, hash_type)

    # END create hash for common passwords

    # START crack passwords
    def chunks(LIST, CHUNK_SIZE):
        # yield successive CHUNK_SIZE-sized slices of LIST
        for i in range(0, len(LIST), CHUNK_SIZE):
            yield LIST[i:i + CHUNK_SIZE]

    # PASSWORDS FROM THE PACKET WILL NEED TO BE DUMPED INTO A FILE NAMED PASSWORDS_FROM_PACKET.TXT
    passwords_from_packet = 'passwords_from_packet.txt'

    no_of_cpu = multiprocessing.cpu_count()

    start = time.perf_counter()

    # OPEN HASHED PASSWORD FILE
    print("Opening file {} ".format(hashed_words_file))
    hashed_password_list = open(hashed_words_file, 'r').readlines()
    hashed_password_list = list(map(
        str.strip, hashed_password_list))  # strips away breaklines
    finish = time.perf_counter()

    # OPEN PASSWORDS OBTAINED FROM FROM THE PACKET
    print("Opening file {}".format(passwords_from_packet))
    hashed_words_list = open(passwords_from_packet, 'r').readlines()
    hashed_words_list = list(map(str.strip, hashed_words_list))
    finish = time.perf_counter()

    # SPLIT HASHED PASSWORD FILE
    print(
        "This computer has {0} CPUs; splitting the passwords into {0} parts"
        .format(no_of_cpu))
    no_of_elements_in_sublist = math.ceil(
        len(hashed_password_list) / no_of_cpu)
    chunks_of_password_list = list(
        chunks(hashed_password_list, no_of_elements_in_sublist))

    # start dictionary attack
    print("Starting dictionary attack on {} password list chunks".format(
        len(chunks_of_password_list)))

    def crack(hashed_password_list, cpu_number):
        number_of_cracked_passwords = 0
        number_of_passwords_scanned = 0

        for hashed_word in hashed_words_list:
            number_of_passwords_scanned += 1
            if hashed_word in hashed_password_list:
                number_of_cracked_passwords += 1
            if number_of_passwords_scanned % 1000 == 0:
                finish = time.perf_counter()
                print(
                    "CPU {}: {}/{} passwords have been cracked. {} minutes elapsed."
                    .format(cpu_number, number_of_cracked_passwords,
                            len(hashed_password_list),
                            round((finish - start) / 60, 2)))

    # executing codes with multiple cores cpus

    if no_of_cpu == 2:
        p1 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[0], "1"])
        p2 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[1], "2"])

        p1.start()
        p2.start()

        p1.join()
        p2.join()
        print("Cracking has been completed")

    elif no_of_cpu == 4:
        p1 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[0], "1"])
        p2 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[1], "2"])
        p3 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[2], "3"])
        p4 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[3], "4"])

        p1.start()
        p2.start()
        p3.start()
        p4.start()

        p1.join()  # waits until the process is completed
        p2.join()
        p3.join()
        p4.join()
        print("Cracking has been completed")

    elif no_of_cpu == 6:
        p1 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[0], "1"])
        p2 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[1], "2"])
        p3 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[2], "3"])
        p4 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[3], "4"])
        p5 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[4], "5"])
        p6 = multiprocessing.Process(target=crack,
                                     args=[chunks_of_password_list[5], "6"])

        p1.start()
        p2.start()
        p3.start()
        p4.start()
        p5.start()
        p6.start()

        p1.join()  # waits until the process is completed
        p2.join()
        p3.join()
        p4.join()
        p5.join()
        p6.join()
        print("Cracking has been completed")

    else:
        print(
            "Error message: You have {} CPU(s). This code has been constructed for either 2, 4 or 6 CPUs."
            .format(no_of_cpu))
        print(
            "How to fix: Go to lines 52-77. I have hardcoded the number of processors to run this. You'll just have to change the if-else statement to match your number of CPUs."
        )
Example no. 31
    def _terminate(self):
        if HAS_MULTIPROCESSING:
            # assuming the original processes()/process() helpers meant the
            # multiprocessing equivalents: terminate every child except this process
            for p in multiprocessing.active_children():
                if p is not multiprocessing.current_process():
                    p.terminate()
                    p.join(3)
Example no. 32
def server(killfile,mode):
 killcheck(killfile,mode)
 defaults=config()
 defaults_dict={'log':False,'host':False,'port':False}
 if defaults != False:
     for i in defaults.keys():
         defaults_dict[i]=True
 #for testing purposes only 

 cmdArg=cmdline()
 if cmdArg.log:
     print("alternate log file: ",cmdArg.log)
     ofile=cmdArg.log
 elif defaults_dict['log'] == True:
     try:
         ofile=defaults['log']
     except:
         print("something went wrong and the defaults.cfg specified log file could not be used! Prompting user for log file!")
         try:
             ofile=input("log file: ")
         except:
             print("Quick! where is the nearest desk to head bang?! the user prompted file failed for some reason! Attempting to use internal Hard Coded log file name!")
             try:
                 ofile="./uname.dat"
             except:
                 print("[Rips eyeballs out of sockets] The internal Hard Coded log file failed to set as well. F**k your GD environment! If it failed at this stage, something is really wrong")
                 sys.exit()

 else:
     ofile="./uname.dat"
 if not os.path.exists(ofile):
     storage=open(ofile,"wb")
     storage.write("".encode())
     storage.close()
  
 if cmdArg.host:
     print("alternate Host Address: ",cmdArg.host)
     host=cmdArg.host
 elif defaults_dict['host'] == True:
     try:
         host=defaults['host']
     except:
         print("something went wrong and the defaults.cfg specified host address could not be used! Prompting user for address!")
         try:
             host=input("host address: ")
         except:
                print("Well f**k, something else is wrong! Using internal Hard Coded default (127.0.0.1)")
                try:
                    host='127.0.0.1'
                except:
                    print("hhmmmmm... what the ????ing h???... okay it seems that the internal Hard Coded address will not work either. Exiting with a Gun to the head [ once out of sight, shoots self in the head ]")
                    sys.exit()
 else:
    host='127.0.0.1'

 if cmdArg.port:
     try:
         print("alternate server port: ",cmdArg.port)
         port=int(cmdArg.port)
     except:
         port=9998
 elif defaults_dict['port']:
     try:
         port=int(defaults['port'])
         print("alternate server port: ",defaults['port'],"[defaults.cfg]")
     except:
         print("something went wrong and the defaults.cfg specified port could not be used! Using internal Hard-Coded default (9998)")
         try:
             port=9998
         except:
             print("something else is wrong, now prompting user for port!")
             try:
                 port=int(input("port: "))
             except:
                 print("oh boy!!! something is really wrong... port value prompt has failed too! Exiting like a little bitch!")
                 sys.exit()
 else:
     port=9998
 
 hostPort=(host,port)
 cmdport=port+1
 

 try:
     server=socket.socket()
 except:
     print("failed to create a socket")
     sys.exit()
 try:
     server.bind(hostPort)
 except:
     print("failed to bind ",hostPort,"to socket!")
     sys.exit()

 print("server started on ",hostPort[0],":",hostPort[1])
 try:
     server.listen(20)
 except:
     print("failed at the server.listen() stage")
     sys.exit()
 #conn,addr = server.accept()
 #print("connection from :",str(addr))
 while True:
  killcheck(killfile,mode)
  try:
      conn,addr = server.accept()
      print("connection from :",str(addr))
  except:
      print("failed at the server.accept() stage")
      sys.exit()
  try:
      data=conn.recv(1024)
  except:
      print("failed at the conn.recv() stage")
      sys.exit()
  
  #if not data:
  # break
  try:
      try:
         storage=open(ofile,"ab")
      except:
         print("well this is embarrassing, I cannot open a file to write to!")
         sys.exit()
       print("data from connected client: ",data)
      storage.write(data)
      storage.close()
  except:
      print("failed at the storage.write() stage")
      sys.exit()

  try:
      conn.send(b"data recorded [ probably, needs a checking function ]")
  except:
      print("failed at the conn.send() stage")
      sys.exit()
 # multiprocess here
  cmd=Process(target=rotate,args=(host,cmdport,ofile))
  cmd.start()
  conn.close()