Example #1
import os
import random
import time
from multiprocessing import Pool, Process


def filecz(name=''):
    # explicit close in a finally block; initialise f so the
    # finally clause is safe even if open() raises
    f = None
    try:
        f = open(r'')
        print(f.read())
    finally:
        if f:
            f.close()

    # the same read, more idiomatically, with a context manager
    with open(r'', 'r') as filereader:
        print(filereader.read())
        filereader.seek(0)  # rewind, otherwise readlines() returns nothing
        for line in filereader.readlines():
            print(line.strip())

    print(os.getpid())
    time.sleep(random.random() * 3)


if __name__ == '__main__':
    print(os.getpid())
    # one Process per task; keep the references so every child can be joined
    processes = [Process(target=filecz, args=(str(i),)) for i in range(5)]
    for p in processes:
        p.start()
    for p in processes:
        p.join()

    # the same work through a fixed-size worker pool
    p = Pool(processes=3)
    for i in range(5):
        p.apply_async(filecz, args=(i,))
    p.close()
    p.join()
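
Note that apply_async returns an AsyncResult; the snippet above fires and forgets, so worker exceptions vanish silently. A minimal sketch of collecting the results explicitly:

results = [p.apply_async(filecz, args=(i,)) for i in range(5)]
p.close()
p.join()
for r in results:
    r.get()  # re-raises here if the worker failed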
Example #2
import itertools
from contextlib import contextmanager

# multiprocessing may be unavailable on some platforms, hence the guard
try:
    import multiprocessing
    from multiprocessing.pool import IMapIterator
except ImportError:
    multiprocessing = None


@contextmanager
def cpmap(cores=1):
    """Configurable parallel map context manager.

    Returns appropriate map compatible function based on configuration:
    - Local single core (the default)
    - Multiple local cores
    """
    if int(cores) == 1:
        yield itertools.imap
    else:
        if multiprocessing is None:
            raise ImportError("multiprocessing not available")
        # Fix to allow keyboard interrupts in multiprocessing: https://gist.github.com/626518
        def wrapper(func):
            def wrap(self, timeout=None):
                return func(self, timeout=timeout if timeout is not None else 1e100)
            return wrap
        IMapIterator.next = wrapper(IMapIterator.next)
        # recycle worker processes on Python 2.7; stay compatible with Python 2.6
        try:
            pool = multiprocessing.Pool(int(cores), maxtasksperchild=None)
        except TypeError:
            pool = multiprocessing.Pool(int(cores))
        try:
            yield pool.imap_unordered
        finally:
            pool.terminate()
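
A usage sketch for the context manager (the worker must be a module-level function so it can be pickled; slow_square is hypothetical):

def slow_square(x):
    return x * x

with cpmap(cores=4) as pmap:
    results = list(pmap(slow_square, range(100)))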
Example #3
import os
import time
from multiprocessing import Pool


def save_model_picture_multiprocessing(train_list, train_numpy):
    '''
    The serial version is too slow, so use a multiprocessing Pool.
    Saves a GMM model for every picture; since the train and test sets
    are already split, this processes the training split only.
    :param train_list:
    :param train_numpy:
    :return: every picture model
    '''
    picture_list = []
    save_path_list = []
    start_time = time.time()
    for i, train_path in enumerate(train_list):
        picture = train_numpy[i]
        #     train_path looks like '1000/1000', so create directory '1000' first
        save_dir = 'model_picture/' + train_path.split('/')[0]
        os.makedirs(save_dir, exist_ok=True)  # race-free, also creates parents
        save_path = 'model_picture/' + train_path + '.model'
        save_path_list.append(save_path)
        picture_list.append(picture)

    picture_save_path = zip(picture_list, save_path_list)
    p = Pool(32)
    p.map(save_model_picture_for_multipro, picture_save_path)
    p.close()
    p.join()
    end_time = time.time()
    print('time for {} pictures is {}'.format(len(train_list), end_time - start_time))
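
Pool.map hands each zipped (picture, save_path) pair to the worker as a single tuple, so the worker must unpack it itself. A minimal sketch of what save_model_picture_for_multipro is assumed to look like:

def save_model_picture_for_multipro(picture_and_path):
    # hypothetical worker: unpack the zipped (picture, save_path) tuple,
    # fit the GMM on `picture`, and serialize it to `save_path`
    picture, save_path = picture_and_path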
Example #4
 def set_pool(self, name: str, *args, **kwargs):
     """
     Create a pool: look up the registered pool class by name
     and instantiate it.
     """
     pool_cls = self.pools[name]
     pool = pool_cls(*args, **kwargs)
     self.pool = pool
     return pool
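
A usage sketch, assuming self.pools is a registry mapping names to pool classes (the registry contents and class name here are hypothetical):

from multiprocessing import Pool


class Workers:
    pools = {'process': Pool}  # hypothetical registry

    def set_pool(self, name: str, *args, **kwargs):
        pool_cls = self.pools[name]
        self.pool = pool_cls(*args, **kwargs)
        return self.pool


w = Workers()
p = w.set_pool('process', processes=4)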
Example #5
from multiprocessing import Pool


def main():
    how_many = 50
    p = Pool(processes=how_many)
    # random_starting_url() and get_links() are assumed to be defined elsewhere
    parse_us = [random_starting_url() for _ in range(how_many)]
    data = p.map(get_links, parse_us)
    data = [url for url_list in data for url in url_list]  # flatten
    p.close()
    p.join()

    with open('D:\\intermediate\\urls.txt', 'w') as f:
        f.write(str(data))
Example #6
 def thread_assign(self):
     # build ten consecutive numbers starting from the prefix 130
     mobile = 130
     ls: list = []
     for i in range(10):
         ls.append(mobile + i)
     p = Pool(processes=self.assign_num)
     ls1 = p.map(self.num_assign, ls)
     p.close()
     p.join()
     print(ls1)
Example #7
 def __init__(self):
     # NOTE: this materialises 1000**3 == 10**9 index tuples in memory,
     # far more than most machines can hold; see the sketch below
     indices = []
     for i in range(1000):
         for j in range(1000):
             for k in range(1000):
                 indices.append((i, j, k))
     start_time = time.time()
     p = multiprocessing.Pool(multiprocessing.cpu_count())
     p.map(self.calcaulte, indices)
     p.close()
     p.join()
     print('time consumed is {}'.format(time.time() - start_time))
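
A lighter variant (a sketch; worker is a hypothetical stand-in for the instance method) streams the indices instead of building the billion-entry list:

import itertools
import multiprocessing


def worker(idx):
    i, j, k = idx  # hypothetical per-index computation goes here


if __name__ == '__main__':
    indices = itertools.product(range(1000), repeat=3)  # lazy, no giant list
    with multiprocessing.Pool() as p:
        for _ in p.imap_unordered(worker, indices, chunksize=10000):
            pass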
Example #8
 def proc_data(self, func, tt):
     print(tt)
     po = Pool(self.processor_count)  # or mp.cpu_count()
     multiple_results = [po.apply_async(func, (self.divData[i], self.resData[i]))
                         for i in range(self.processor_count)]
     # get() blocks until each worker finishes and re-raises worker errors
     for res in multiple_results:
         res.get(timeout=None)
     po.close()
     po.join()
Example #9
def Gradual_Parallel_MST(ss, idx, scenario_outcomes, mst):
    # cvar, new_sets, groupby_func and subset_mst are assumed to be
    # defined in the surrounding module
    subsets = []
    subset1 = [
        s for s in ss if str(scenario_outcomes[s][idx]) in list(cvar[1])
    ]
    subset2 = [
        s for s in ss if str(scenario_outcomes[s][idx]) not in list(cvar[1])
    ]
    if subset2:
        new_sets.append(subset2)
        subsets.append(subset2)

    if len(list(cvar[1])) > 1:
        sortable = [(s, scenario_outcomes[s][idx]) for s in subset1]
        subset3 = groupby_func(sortable, key=itemgetter(1))
        if subset3:
            new_sets.append(subset3)
            subsets.append(subset3)
    else:
        if subset1:
            new_sets.append(subset1)
            subsets.append(subset1)

    ### generate multi-process object and run mst subset gen

    nprocs = min(len(subsets), mp.cpu_count())

    ### create pool
    pool = mp.Pool(nprocs)

    ### run pool: one task per subset
    results = [
        pool.apply_async(subset_mst, args=(subset, scenario_outcomes, mst))
        for subset in subsets
    ]
    pool.close()
    pool.join()

    # read results through the public get() rather than the private _value
    NAC_to_add = set().union(*[res.get() for res in results])

    return NAC_to_add, subsets
Example #10
import os
import time
import uuid
from multiprocessing import Manager, Pool

# readGPS, readI2c and makeCurrData are assumed to be defined elsewhere


def main():

    path = "/home/pi/data/{}".format(uuid.uuid4())

    os.mkdir(path)

    p = Pool(20)
    # plain multiprocessing.Queue objects cannot be passed to Pool workers
    # as arguments; Manager queues can
    manager = Manager()
    i2cq = manager.Queue(maxsize=200)
    gpsq = manager.Queue(maxsize=4)
    # apply_async expects the arguments as a tuple
    p.apply_async(readGPS, (gpsq,))
    p.apply_async(readI2c, (i2cq,))

    time.sleep(200)

    while True:
        data = list()
        for i in range(0, 500):
            currData = makeCurrData(i2cq, gpsq)
            data.append(currData)

        print(data)
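
Manager().Queue() is used above because a plain multiprocessing.Queue raises "RuntimeError: Queue objects should only be shared between processes through inheritance" when passed as a Pool task argument; the manager proxy pickles cleanly, at the cost of an extra broker process.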
Example #11
    def get_img(self, num_threads=None, img=True, dcm=True):
        print("Extracting images from LUNA16 Dataset")

        for subdir in self.subsetdirs:
            src_dir = os.path.join(self.data_path, subdir)
            src_paths = glob(src_dir + '/*.mhd')

            if num_threads:
                pool = multiprocessing.Pool(num_threads)
                pool.map(self.process_image, src_paths)
                pool.close()
                pool.join()

            else:
                for src_path in src_paths:
                    print("src Path: ", src_path)
                    img_array, patient_id = self.process_image(src_path)
                    if self.img:
                        #print(self.img_path)
                        #dst_dir = os.path.join(self.img_path,str(patient_id))
                        dst_dir = self.img_path + '/' + str(patient_id) + '/'
                        self.writetoimg(img_array, dst_dir)
                    if self.mhd:
                        dst_dir = self.mhd_path + '/' + str(patient_id)
                        self.writetomhd(img_array, dst_dir)
Example #12
def run_iteration():
    start = time.time()

    # Timeout settings: both must be set to take effect. When a page exceeds
    # them an error is raised (without them, it would wait indefinitely).
    document.set_page_load_timeout(5)
    document.set_script_timeout(5)

    p = Pool()

    global bank_info
    bank_info = p.map(itr_banks, range(len(bank_sites)))
    # flatten the list using sum
    bank_info = sum(bank_info, [])

    p.close()
    p.join()
    end = time.time()
    print("Total time used on fetching: %0.3f" % (end - start))

    # a new manipulation will be needed if a new bank's exchange rates are added to bank_info
    for e in bank_info:  # normalize
        e[0] = re.sub(r"\(.*\)", "", e[0])
        e[1] = re.sub(r"\(.*\)", "", e[1])
        e[0] = re.sub(r"/.*", "", e[0])
        e[1] = re.sub(r"/.*", "", e[1])
        e[3] = 1.0 / e[3]
    # CAUTION: the 现钞/现汇 (cash vs. wire-transfer rate) labels may be lost here

    for item in bank_info:
        name0 = item[0]
        name1 = item[1]
        if name0 not in currencies:
            currencies.append(name0)
        if name1 not in currencies:
            currencies.append(name1)
Example #13
def main():
    client = InfluxDBClient(host=myhost,
                            port=myport,
                            username=myuser,
                            password=mypass,
                            ssl=True,
                            verify_ssl=True)
    databases = client.get_list_database()

    for every_db in databases:
        container[every_db['name']] = {}  # assign database name - 1-st level;
        client.switch_database(every_db['name'])
        try:
            print(f'Get measurements for [{every_db["name"]}] database')
            measurements = client.get_list_measurements()
            # print(measurements)
        except Exception as e:
            print(
                f'ERROR getting measurements for database [{every_db["name"]}]: [{e}]'
            )
            continue

        for every_meas in measurements:
            container[every_db['name']][every_meas['name']] = {
            }  # assign measure name - 2-nd level;

    # create temporary working object to obtain fields:
    working_list = []
    for each_db in container:
        for each_meas in container[each_db]:
            temp_object = {
                'host': myhost,
                'port': myport,
                'user': myuser,
                'pass': mypass,
                'database': each_db,
                'measure': each_meas
            }
            working_list.append(temp_object)

    ############################################################################
    # get all fields of influx and put them into temporary storage
    with Pool(min(len(working_list), maxconnections)) as p:
        get_fields_list = p.map(call_fields, working_list)
        get_tags_list = p.map(call_tags, working_list)

    # move info about fields into main storage variable
    for each_record in get_fields_list:
        db_name = list(each_record.keys())[0]
        # print("f> "+db_name) # debug
        meas_name = list(each_record.get(db_name))[0]
        # print("f>> "+meas_name) # debug
        container[db_name][meas_name]['fields_list'] = [
        ]  # set empty array for received fields
        container[db_name][meas_name]['fields'] = {
        }  # set empty dict for store info separately
        try:
            for field in each_record[db_name][meas_name]:
                field_name = field[0]
                # print("f>>> "+field_name) # debug
                container[db_name][meas_name]['fields_list'].append(field[0])
                container[db_name][meas_name]['fields'][field_name] = {}
                container[db_name][meas_name]['fields'][field_name][
                    'name'] = field[0]
                container[db_name][meas_name]['fields'][field_name][
                    'type'] = field[1]
        except Exception as e:
            print(
                f'ERROR caught during field iteration in working_list for field [{field}]'
            )
            print(e)
            continue

    del get_fields_list  # clean temporary storage;

    # move info about tags into main storage variable
    for each_record in get_tags_list:
        db_name = list(each_record.keys())[0]
        # print("t> "+db_name) # debug
        meas_name = list(each_record.get(db_name))[0]
        # print("t>> "+meas_name) # debug
        container[db_name][meas_name]['tags_list'] = [
        ]  # set empty array for received tags
        container[db_name][meas_name]['tags'] = {
        }  # set empty dict for store info separately
        try:

            for tag in each_record[db_name][meas_name]:
                tag_name = tag[0]
                # print("t>>> "+tag_name) # debug
                container[db_name][meas_name]['tags_list'].append(tag_name)
                container[db_name][meas_name]['tags'][tag_name] = {}
                container[db_name][meas_name]['tags'][tag_name]['name'] = tag[
                    0]

        except Exception as e:
            print(
                f'ERROR caught during tag iteration in working_list for tag [{tag}]'
            )
            print(e)
            continue

    del get_tags_list  # clean temporary storage;

    # fill temporary storage to retrieve the last record for each field
    working_list = []
    for each_db in container:
        # print(">"+str(container[each_db])) # debug
        for each_meas in container[each_db]:
            # print(">>"+str(container[each_db][each_meas])) # debug
            # print(each_db + " > " + each_meas)
            # print(container[each_db][each_meas])
            for each_field in container[each_db][each_meas]['fields_list']:
                # print('## each_ms = '+str(each_ms))
                # print(">>>"+str(container[each_db][each_meas]['fields_list'])) # debug
                temp_object = {
                    'host': myhost,
                    'port': myport,
                    'user': myuser,
                    'pass': mypass,
                    'database': each_db,
                    'measure': each_meas,
                    'field': each_field
                }
                working_list.append(temp_object)

    # get info about last record to every field time/value:
    with Pool(min(len(working_list), maxconnections)) as p:
        get_last_records = p.map(call_query, working_list)

    # move received info into main storage:
    for each_record in get_last_records:
        db_name = list(each_record.keys())[0]
        meas_name = list(each_record.get(db_name))[0]
        # print(db_name)  # debug
        # print(meas_name)  # debug
        try:
            for field in each_record[db_name][meas_name]:
                # print(field) # debug
                field_last_time = each_record[db_name][meas_name][field][
                    'time']
                field_last_value = each_record[db_name][meas_name][field][
                    'value']
                # move it to CONTAINER
                container[db_name][meas_name]['fields'][field][
                    'last_time'] = field_last_time
                container[db_name][meas_name]['fields'][field][
                    'last_value'] = field_last_value
        except Exception as e:
            print(
                f'ERROR parsing working_list for last-record time of field [{field}]'
            )
            print(e)
            continue
    del get_last_records  # clean temporary storage

    # create and fill html report of all this stuff:
    report = open(report_file, 'w')
    report.write(
        html_startpart)  # write first part for all css style and basic stuff;
    for db in container:
        print(db)
        for meas in container[db]:
            print(meas)
            report.write('<div class="row">')
            report.write(f'<div class="data_db"><span>{db}</span></div>'
                         )  # end of database block (1-st column);
            report.write(
                f'<div class="data_meas"><span>{meas}</span></div><div class="data_tags">'
            )  # end of measurement block (2-nd column)

            try:
                for tag in container[db][meas]['tags_list']:
                    report.write(f'<span>{tag}</span>')
            except Exception as e:
                print(
                    f'ERROR - Processing [{meas}] in [{db}] and try iterate over ["tags_list"] and catch [{e}]'
                )

            report.write('</div><div class="data_fields">'
                         )  # end of tags block (3-rd column);

            try:
                for field in container[db][meas]['fields_list']:
                    report.write(f'<span>{field}</span>')
            except Exception as e:
                print(
                    f'ERROR processing [{meas}] in [{db}] while iterating over "fields_list": [{e}]'
                )

            report.write('</div><div class="field_last_record">'
                         )  # end for fields block (4-th column);
            # report.write('</div>') # end for fields block (4-th column);

            try:
                for field in container[db][meas]['fields_list']:
                    last_time = container[db][meas]['fields'][field][
                        'last_time']
                    last_value = container[db][meas]['fields'][field][
                        'last_value']
                    # report.write(f'<span>[{field}]=>[{last_time}]=[{last_value}]</span>')
                    report.write(f'<span>[{last_time}]</span>')
            except Exception as e:
                print(
                    f'ERROR processing [{meas}] in [{db}] while writing last time/value from "fields_list": [{e}]'
                )

            report.write('</div>')  # end of last record block
            report.write('</div>')  # end for row block

    report.write(html_endpart)  # write final closed tags;
    report.close()
Example #14

from multiprocessing import Pool
from time import time


def parseArgs():
    # argparse setup truncated in the source; only the return statement survives
    return (parser.parse_args().image, parser.parse_args().output,
            parser.parse_args().wordlist)


def steghide(password):
    from subprocess import call, DEVNULL
    cmd = 'steghide extract -sf {0} -xf {1} -p {2}'.format(
        image, output, password)
    if call(cmd.split(), stdout=DEVNULL, stderr=DEVNULL) == 0:
        print('[=] Password: {}\n[Ctrl + C] to stop'.format(password))


if __name__ == '__main__':

    banner()
    image, output, wordlist = parseArgs()
    pool = Pool()
    start = time()
    pool.map(
        steghide,
        [password.rstrip() for password in open(wordlist, errors='ignore')])
    totalTime = time() - start
    timeFormat = 'seconds'
    if totalTime >= 60:
        totalTime = totalTime / 60
        timeFormat = 'minutes'
        if totalTime >= 60:  # totalTime is in minutes at this point
            totalTime = totalTime / 60
            timeFormat = 'hours'
    print('[=] Finished : {0:.2f} {1}'.format(totalTime, timeFormat))
Example #15

def getSeries(PatientID, StudyInstanceUID, collection=STUDY):
    # opening reconstructed from the compact duplicate in Example #16
    response = rq.get(API_ADDRESS + 'getSeries/',
                      headers={
                          'X-Mashape-Key': PROD_MASHAPE_API_KEY,
                          'api_key': TCIA_API_KEY,
                          'Accept': 'application/json'
                      },
                      params={
                          'Collection': collection,
                          'PatientID': PatientID,
                          'StudyInstanceUID': StudyInstanceUID,
                          'format': 'JSON'
                      })
    return ujson.loads(response.text)


def getMRI(SeriesInstanceUID, filename):
    response = rq.get(API_ADDRESS + 'getImage/',
                      headers={
                          'X-Mashape-Key': PROD_MASHAPE_API_KEY,
                          'api_key': TCIA_API_KEY,
                          'Accept': 'application/json'
                      },
                      params={'SeriesInstanceUID': SeriesInstanceUID},
                      stream=True)
    with open(filename, 'wb') as f:
        for chunk in response.iter_content(chunk_size=1024):
            if chunk:  # filter out keep-alive new chunks
                f.write(chunk)


pool = mp.Pool(mp.cpu_count())
study = getStudy(STUDY)
print(study)
series = getSeries(study[0]['PatientID'],
                   study[0]['StudyInstanceUID'],
                   collection=STUDY)
print(series)
Example #16

def getStudy(collection=STUDY):
    # opening reconstructed to match the getSeries/getMRI definitions below
    response = rq.get(API_ADDRESS+'getStudy/',
            headers={'X-Mashape-Key': PROD_MASHAPE_API_KEY, 'api_key': TCIA_API_KEY,
                'Accept': 'application/json'}, params={'Collection': collection,
                    'format': 'JSON'})
    return ujson.loads(response.text)

def getSeries(PatientID, StudyInstanceUID, collection=STUDY):
    response = rq.get(API_ADDRESS+'getSeries/',
            headers={'X-Mashape-Key': PROD_MASHAPE_API_KEY, 'api_key': TCIA_API_KEY,
                'Accept': 'application/json'}, params={'Collection': collection,
                    'PatientID': PatientID,
                    'StudyInstanceUID': StudyInstanceUID,
                    'format': 'JSON'})
    return ujson.loads(response.text)

def getMRI(SeriesInstanceUID, filename):
    response = rq.get(API_ADDRESS+'getImage/',
            headers={'X-Mashape-Key': PROD_MASHAPE_API_KEY, 'api_key': TCIA_API_KEY,
                'Accept': 'application/json'}, params={
                    'SeriesInstanceUID': SeriesInstanceUID}, stream=True)
    with open(filename, 'wb') as f:
        for chunk in response.iter_content(chunk_size=1024):
            if chunk: # filter out keep-alive new chunks
                f.write(chunk)

pool = mp.Pool(mp.cpu_count())
study = getStudy(STUDY)
print(study)
series = getSeries(study[0]['PatientID'], study[0]['StudyInstanceUID'],
        collection=STUDY)
print(series)
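
The pool is created here but never used in the surviving portion. A plausible sketch of the intended fan-out (the output filenames are hypothetical) maps getMRI over the series list:

args = [(s['SeriesInstanceUID'], s['SeriesInstanceUID'] + '.zip')
        for s in series]
pool.starmap(getMRI, args)  # starmap requires Python 3.3+
pool.close()
pool.join()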

Example #17
import multiprocessing as mp
import time
import random
import os

# When one core needs data computed by another core, the processes must
# communicate (cf. the Message Passing Interface, MPI).


def really_time_consuming(x):
    time.sleep(0.5)
    return random.random()


if __name__ == '__main__':
    final_value = 0
    trials = list(range(350))
    myPool = mp.Pool(4)

    # serial baseline: roughly 350 * 0.5 s of sleeping
    for i in trials:
        final_value += really_time_consuming(i)

    # parallel version of the same computation
    expt_results = myPool.map(really_time_consuming, trials)
    final_value = sum(expt_results)
    myPool.close()
    myPool.join()
    print(os.getpid())

Example #18
from multiprocessing import Pool


def main():
    # get_photos() and download_photo() are assumed to be defined elsewhere
    photos = get_photos()
    with Pool(processes=10) as process_pool:
        # Pool.map takes the worker first, then the iterable
        process_pool.map(download_photo, photos)
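
An equivalent sketch with the newer concurrent.futures API, which spells the worker count max_workers:

from concurrent.futures import ProcessPoolExecutor


def main():
    photos = get_photos()  # assumed defined elsewhere
    with ProcessPoolExecutor(max_workers=10) as executor:
        executor.map(download_photo, photos)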
Example #19
from multiprocessing import Pool


def networker(routers):
    workers = Pool(6)
    # each router record is assumed to carry its address in position 0
    workers.map(mac_ip, [router[0] for router in routers])
    workers.close()
    workers.join()
Example #20
    # `argv` and `runs` come from the truncated top of the script
    try:
        process = globals()[argv[1]]
    except IndexError:
        print "Must supply a process to run."
        exit(0)
    except KeyError:
        # a missing name raises KeyError (globals().get() would silently return None)
        print argv[1], "isn't a process."
        exit(0)

    if argv[-1] == 'loop':
        runs.pop(-1)
        print "looping..."
        loop(process, runs)
    elif argv[-1] == 'pool':
        runs.pop(-1)
        print "multiprocessing"
        pool(process, runs)
    elif argv[-1] == 'fast':
        processors = 12  # note: this value is never passed on to pool()
        pool(process, runs)
    elif argv[-1] == 'all':
        pool(process, runs)
    elif 'r11_' in argv[-1] and len(argv) == 3:
        print "straight processing..."
        process(argv[-1])
    elif 'r11_' in argv[-1]:
        print "multiprocessing..."
        pool(process, runs)
    else:
        print "I'm sorry, Aaron. I'm afraid I can't do that."
        exit('goodbye')
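
In this snippet pool is a user-defined dispatcher (called as pool(process, runs), parallel to loop), not multiprocessing.Pool itself. A hypothetical sketch of such a helper:

from multiprocessing import Pool


def pool(process, runs, processors=None):
    # hypothetical helper matching the call sites above;
    # processors=None lets Pool use every available core
    p = Pool(processors)
    p.map(process, runs)
    p.close()
    p.join()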
Example #21
from multiprocessing import Pool
import time

COUNT = 500000000


def countdown(n):
    while n > 0:
        n -= 1


if __name__ == '__main__':
    pool = Pool(processes=2)
    start = time.time()
    # split the work in half and run both halves concurrently
    r1 = pool.apply_async(countdown, (COUNT // 2,))
    r2 = pool.apply_async(countdown, (COUNT // 2,))
    pool.close()
    pool.join()
    end = time.time()
    print('time taken in seconds:', end - start)
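
Since countdown is pure CPU work, splitting it across two processes sidesteps CPython's GIL and should roughly halve the wall-clock time; the same split across two threads would show no speedup.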