Ejemplo n.º 1
0
 def GridSearch(self, params):
     """Train one NeuralNet per hyper-parameter dict in *params*, in parallel.

     Each dict must supply 'batch_size', 'epochs', 'alpha', 'decay_rate',
     '_lambda' and 'keep_prob'. One pool worker is started per parameter
     set; the `with` block waits for every training job to finish.

     Returns:
         list[NeuralNet]: the trained models, in the same order as *params*
         (empty when the module is not '__main__' — the guard keeps the
         process pool from re-spawning on import).
     """
     # Bug fix: `models` used to be bound only inside the __name__ guard,
     # so `return models` raised UnboundLocalError when the guard was false.
     models = []
     if __name__ == '__main__':
         with ppe(max_workers=len(params)) as pool:
             for param in params:
                 model = NeuralNet(self.layers, self.X, self.y, self.ac_funcs)
                 models.append(model)
                 pool.submit(model.startTraining,
                             batch_size=param['batch_size'],
                             epochs=param['epochs'],
                             alpha=param['alpha'],
                             decay_rate=param['decay_rate'],
                             _lambda=param['_lambda'],
                             keep_prob=param['keep_prob'],
                             print_metrics=False,
                             evaluate=True,
                             X_test=self.X_test,
                             y_test=self.y_test)
     return models
Ejemplo n.º 2
0
 def RandomizedGridSearch(self, params_range, nb_models):
     """Randomized hyper-parameter search: sample *nb_models* parameter sets
     from *params_range* and train one NeuralNet per set, in parallel.

     *params_range* maps each hyper-parameter name to a ``[low, high]`` pair.
     Integer parameters ('batch_size', 'epochs') are sampled uniformly;
     'alpha', 'decay_rate' and '_lambda' are sampled log-uniformly;
     'keep_prob' is a per-layer list of uniform samples.

     Returns:
         list[NeuralNet]: the trained models (empty when the module is not
         '__main__'). Bug fix: the original never returned, unlike the
         sibling GridSearch.
     """
     models = []
     if __name__ == '__main__':
         def uniform(lo, hi):
             # Uniform float in [lo, hi).
             return np.random.rand() * (hi - lo) + lo

         def uniform_int(lo, hi):
             # Uniform integer in [lo, hi], rounded.
             return int(np.round(uniform(lo, hi)))

         def log_uniform(lo, hi):
             # Log-uniform sample: uniform in log10-space, then exponentiate.
             return 10 ** uniform(np.log10(lo), np.log10(hi))

         params = [{
             'batch_size': uniform_int(*params_range['batch_size']),
             'epochs': uniform_int(*params_range['epochs']),
             'alpha': log_uniform(*params_range['alpha']),
             'decay_rate': log_uniform(*params_range['decay_rate']),
             '_lambda': log_uniform(*params_range['_lambda']),
             # One keep-probability per non-input layer.
             'keep_prob': [uniform(*params_range['keep_prob'])
                           for _ in range(len(self.layers) - 1)],
         } for _ in range(nb_models)]

         with ppe(max_workers=len(params)) as pool:
             for param in params:
                 model = NeuralNet(self.layers, self.X, self.y, self.ac_funcs)
                 models.append(model)
                 pool.submit(model.startTraining,
                             batch_size=param['batch_size'],
                             epochs=param['epochs'],
                             alpha=param['alpha'],
                             decay_rate=param['decay_rate'],
                             _lambda=param['_lambda'],
                             keep_prob=param['keep_prob'],
                             print_metrics=False,
                             evaluate=True,
                             X_test=self.X_test,
                             y_test=self.y_test)
     return models
Ejemplo n.º 3
0
def word_freq(sentence: str) -> dict:
    """Return a histogram of the cleaned words in *sentence*.

    Tokenizes the sentence, cleans each token in a process pool via
    ``clean_words``, drops falsy results (e.g. tokens cleaned to nothing),
    and counts the survivors.

    Returns:
        dict: cleaned word -> occurrence count.
    """
    from collections import Counter

    words = word_tokenize(sentence)
    with ppe(max_workers=4) as exe:
        res = exe.map(clean_words, words)
    # Counter replaces the manual if-in-dict counting loop; filter(None, ...)
    # keeps the original behavior of discarding falsy cleaned tokens.
    return dict(Counter(filter(None, res)))
Ejemplo n.º 4
0
 def load_images(self, path):
     """Load every student's images from *path* in parallel.

     Expects ``path/<student>/<image files>`` on disk; entries containing
     '.ipynb' (notebook checkpoints) are skipped.

     Returns:
         tuple: (all_images, student_names) where all_images[i] is the list
         of images (cv2.imread results) for student_names[i].
     """
     all_images = []
     student_names = os.listdir(path)
     # Bug fix: remove() raised ValueError when no checkpoint dir existed.
     if '.ipynb_checkpoints' in student_names:
         student_names.remove('.ipynb_checkpoints')
     for student in student_names:
         student_image = []
         image_paths = [
             f'{path}/{student}/{i}'
             for i in os.listdir(f'{path}/{student}/') if '.ipynb' not in i
         ]
         with ppe() as e:
             # Multiprocessing to load all the images from a folder.
             for output in e.map(cv2.imread, image_paths):
                 student_image.append(output)
         all_images.append(student_image)
     return all_images, student_names
Ejemplo n.º 5
0
import re
# glob() already yields Path objects — a plain list() beats the identity comprehension.
fps = list(Path('parsed').glob('*'))


def pmap(fp):
    """Download the image referenced by one parsed JSON file into imgs/.

    *fp* is a Path to a JSON file holding
    ``[text, img_hash, img_url, num_clk, target, tags]``. Skips images
    already on disk. Best-effort: every exception is printed and swallowed
    so a pool-wide map keeps going.
    """
    try:
        obj = json.load(fp.open())
        # Renamed from `hash` — don't shadow the builtin.
        text, img_hash, img_url, num_clk, target, tags = obj
        out_path = f'imgs/{img_hash}.jpg'
        if os.path.exists(out_path):
            return  # already downloaded
        print(fp)
        print(obj)
        # Protocol-relative URLs ('//host/...') need a scheme. Bug fix: the
        # original only matched '^http:', so 'https://...' URLs were mangled
        # into 'http:https://...'; the original no-op true-branch is gone too.
        if not re.match(r'^https?:', img_url):
            img_url = 'http:' + img_url

        r = requests.get(img_url)
        print(img_url)
        print(r.status_code)
        if r.status_code == 200:
            with open(out_path, 'wb') as f:
                f.write(r.content)
    except Exception as ex:
        # Best-effort scraper: log the failure and move on.
        print(ex)


# Fan the downloads out over 96 worker processes; map() submits everything
# eagerly and the `with` block waits for completion. The result iterator is
# deliberately discarded — pmap handles its own errors and returns nothing.
# NOTE(review): ProcessPoolExecutor use at module level should be guarded by
# `if __name__ == '__main__':` on spawn-based platforms (Windows/macOS);
# confirm the target platform.
with ppe(max_workers=96) as exe:
    exe.map(pmap, fps)
Ejemplo n.º 6
0
 def mp_func(*args, **kwargs):
     # Submit `func` (captured from the enclosing scope — presumably a
     # decorator/wrapper not visible in this chunk; confirm) to a process
     # pool and return the resulting Future immediately (non-blocking).
     # NOTE(review): a fresh ppe() is created per call and never shut down —
     # each call leaks a pool of worker processes; a shared, long-lived
     # executor would be safer.
     return ppe().submit(func, *args, **kwargs)
Ejemplo n.º 7
0
def bz2compress(data):
    """Compress *data* (bytes) with bzip2 and return the compressed bytes."""
    return bz2.compress(data)


def lzmacompress(data):
    """Compress *data* (bytes) with LZMA at maximum preset (9)."""
    return lzma.compress(data, preset=9)


#BUF = 8*(1<<10) # 8K
BUF = 1 << 20  # 1M
CPUs = os.cpu_count()

# Worker-process pool for compressing chunks in parallel (never shut down
# in this excerpt; presumably handled further down — confirm).
multip = ppe(max_workers=3)

# Read the input file (argv[1]) in BUF-sized binary chunks, batching up to
# one chunk per CPU into `mdata` before dispatching. The dispatch/compress/
# write step is past the end of this excerpt.
with open(sys.argv[1], 'rb') as inf, open(sys.argv[2], 'wb') as outf:
    i = 0
    while True:
        over = False
        mdata = []

        # Collect up to CPUs chunks; an empty read() signals EOF.
        for _ in range(CPUs):
            data = inf.read(BUF)

            if not data:
                over = True
                break

            mdata.append(data)
Ejemplo n.º 8
0
def by_process(func, items, callback, workers):
    """Apply *func* to each item in a pool of *workers* processes, passing
    every result to *callback* as it completes.

    Results arrive in completion order, not submission order. Exceptions
    raised by *func* propagate from ``result()``.
    """
    with ppe(max_workers=workers) as executor:
        pending = [executor.submit(func, item) for item in items]
        for finished in comp(pending):
            callback(finished.result())
Ejemplo n.º 9
0
    for k in range(n):
        write_files('rewritten_files\\' + str(k) + '.txt',
                    get_file_content('given_files\\' + str(k) + '.txt'))


# Copy a single numbered file: the per-item unit of work for the pools below.
def write_one_file(k):
    source = 'given_files\\' + str(k) + '.txt'
    destination = 'rewritten_files\\' + str(k) + '.txt'
    write_files(destination, get_file_content(source))


if __name__ == '__main__':

    # Benchmark 1: thread pool. I/O-bound copying overlaps well under the GIL.
    # NOTE(review): map() results are discarded, so exceptions raised inside
    # write_one_file are silently dropped in both pool benchmarks.
    start = time.time()
    with tpe(8) as executor:
        executor.map(write_one_file, range(1000))

    print('Час виконання ф-ції паралельно за допомогою потоків ThreadPoolExecutor: ',
          time.time() - start, 'c.')

    # Benchmark 2: process pool. Adds process-spawn and pickling overhead
    # for the same I/O-bound task.
    start = time.time()
    with ppe(8) as executor:
        executor.map(write_one_file, range(1000))

    print('Час виконання ф-ції паралельно за допомогою процесів ProcessPoolExecutor: ',
          time.time() - start, 'c.')

    # Benchmark 3: plain sequential rewrite as the baseline.
    start = time.time()
    rewrite_files(1000)
    print('Час виконання ф-ції послідовно: ', time.time() - start, 'c.')