Example #1
0
    def multiprocessingThreads(self):
        """Split self.filelist into roughly equal chunks and run each chunk
        in its own child process via a MyTMultithread worker.

        Reads self.num_cpus, self.max_multiprocess, self.filelist,
        self.delay, self.funname and self.max_threads.  Blocks until all
        child processes have finished.
        """
        num_process = min(self.num_cpus, self.max_multiprocess)

        # Ceil-divide so every file lands in exactly one chunk.  (The old
        # code used an odd size formula and had an unreachable
        # `if i == num_process` branch inside range(num_process) that would
        # have double-appended the last chunk; it also built an unused
        # Pool(num_process), leaking idle worker processes.)
        chunk_size = -(-len(self.filelist) // num_process) if self.filelist else 1
        datalists = [self.filelist[i * chunk_size:(i + 1) * chunk_size]
                     for i in range(num_process)]

        try:
            processes = []
            for chunk in datalists:
                worker = MyTMultithread(chunk, self.delay,
                                        self.funname, self.max_threads)
                # Pass the bound method itself.  The original wrote
                # target=MultThread.startrun(), which ran the work in the
                # PARENT and handed its return value (None) to Process, so
                # the children never did anything.
                proc = multiprocessing.Process(target=worker.startrun)
                processes.append(proc)

            for proc in processes:
                proc.start()
            # Actually wait for the children; the original printed
            # 'wait join' but only ever called start().
            for proc in processes:
                proc.join()

            print('waite over')
        except Exception as e:
            print('error :', e)
        print('end process')
def main():
    """Entry point: fetch the page once, then download images with 8 threads.

    (Original docstring said "multiprocess download", but the code uses
    threads.)  Requires module-level `basice`, `headers` and `getImg`.
    """
    html = basice(headers)
    # The original created an unused multiprocessing Pool and rebound `po`
    # on every iteration, so only the LAST thread was ever joined and the
    # others could still be running when main() returned.
    threads = []
    for _ in range(8):
        t = threading.Thread(target=getImg, args=(html, ))
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
def main():
    """Entry point: fetch the page, collect image URLs, create the target
    directory name, then download with 5 threads.

    NOTE(review): this redefines the earlier main() in this file; only this
    later definition is in effect at runtime.
    """
    RQ = request(url, headers)
    urls = request_Images(RQ)
    dirName = imgDirName(RQ)
    print("目录名称为%s" % dirName)
    # The original created an unused Pool() and rebound `po` each loop, so
    # only the last thread was joined; track and join ALL of them.
    threads = []
    for _ in range(5):
        t = threading.Thread(target=down_Images,
                             args=(urls, headers, dirName))
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
Example #4
0
                     # NOTE(review): extraction artifact — this span is a
                     # rotated slice of one loop body: the `while` header
                     # near the bottom logically comes FIRST, and the bare
                     # argument lines immediately below continue the
                     # ProcessFile(...) call that opens on the LAST line.
                     # Code left byte-identical; do not run as-is.
                     args.maxpercentcellsbelowdensity, activevalidations,
                     args.deletefiles, args.validatefilespath,
                     args.csvresults, args.verbose)
         i += 1
         continue
     # Parallel path: one Process per file in this batch of `threads`
     # files, started together and all joined before the next batch.
     # (The Pool(threads) built here is never used — only bare Process
     # objects are; flagging rather than changing in a doc-only pass.)
     startpool = time.time()
     p = Pool(threads)
     for thread in range(0, threads):
         params = (files[i + thread], args.lasversion,
                   args.minimumpointsdensity, args.displayheader,
                   args.cellsize, args.maxpercentcellsbelowdensity,
                   activevalidations, args.deletefiles,
                   args.validatefilespath, args.csvresults, args.verbose)
         p = Process(target=ProcessFile, args=params)
         jobs.append(p)
         p.start()
     for proc in jobs:
         proc.join()
     print('Pool elapsed time: {0:.2f}s'.format(time.time() - startpool))
     i += threads
 while i < len(files):
     params = (files[i], args.lasversion, args.minimumpointsdensity,
               args.displayheader, args.cellsize,
               args.maxpercentcellsbelowdensity, activevalidations,
               args.deletefiles, args.validatefilespath, args.csvresults,
               args.verbose)
     # Single-thread mode: process the file synchronously in this process.
     if threads == 1:
         ProcessFile(files[i], args.lasversion, args.minimumpointsdensity,
                     args.displayheader, args.cellsize,
                     args.maxpercentcellsbelowdensity, activevalidations,
                     args.deletefiles, args.validatefilespath,
if __name__ == '__main__':
    lock = Lock()

    # Collect input files, skipping hidden entries (e.g. '.DS_Store').
    all_data = os.listdir(asmfilepath)
    data = [item for item in all_data if not item.startswith('.')]

    # Never spawn more workers than there are files; the max(..., 1) also
    # guards against ZeroDivisionError when the directory is empty.
    if threadnumber > len(data):
        threadnumber = max(len(data), 1)

    # Floor division: plain '/' yields a float on Python 3 and would break
    # the slice indices below (the original relied on Python 2 division).
    # max(..., 1) keeps the range() step valid when data is empty.
    elementnumber = max(len(data) // threadnumber, 1)
    chunks = [data[x:x + elementnumber]
              for x in range(0, len(data), elementnumber)]

    get_feature_attribute()
    # set csv title
    generate_csv()

    # Start one process per chunk and join ALL of them.  The original
    # built an unused Pool(threadnumber) and joined only the last Process,
    # so earlier workers could still be running at 'Finish'.
    processes = []
    for idx, chunk in enumerate(chunks):
        proc = Process(target=f, args=(lock, chunk, idx))
        proc.start()
        processes.append(proc)
    for proc in processes:
        proc.join()
    print('Finish')
def main():
    """Run `worker(INPUT_TOPIC, FILE_PATH)` on a 5-worker pool and wait.

    Fix: multiprocessing.Pool has no start() method — the original called
    p.start() after apply() and raised AttributeError every run.
    """
    p = Pool(5)
    try:
        # Pool.apply blocks until worker(...) has returned in a child.
        p.apply(worker, args=(INPUT_TOPIC, FILE_PATH))
    finally:
        # Shut the pool down cleanly instead of leaking its workers.
        p.close()
        p.join()