import multiprocessing

import parmap

def mp_handler(s, t):
    # Reuse an explicit pool (with a custom initializer) for the parmap call.
    p = multiprocessing.Pool(processes=4, initializer=start_process)
    # Earlier attempts, kept for reference:
    # x = p.map(multi_run_wrapper, [(s, t)])
    # x = p.map(partial_AnalyzeServer, t, 1)
    # pm_pool reuses the pool created above; pm_chunksize sets the chunk size
    # (a bare chunksize= kwarg would be forwarded to AnalyzeServer instead).
    x = parmap.map_async(AnalyzeServer, s, t, pm_pool=p, pm_chunksize=1).get()
    p.close()
    p.join()
    return x
Example #2
 def test_map_async_noparallel_started_simultaneously_timings(self):
     NTASKS = 4
     items = list(range(NTASKS))
     mytime = time.time()
     # With pm_parallel=False each map_async call blocks until its tasks finish:
     with parmap.map_async(_wait, items, pm_parallel=False) as compute1:
         elapsed1 = time.time() - mytime
         mytime = time.time()
         with parmap.map_async(_wait, items, pm_parallel=False) as compute2:
             elapsed2 = time.time() - mytime
             mytime = time.time()
             result1 = compute1.get()
             result2 = compute2.get()
             finished = time.time() - mytime
     self.assertTrue(elapsed1 >= NTASKS * TIME_PER_TEST)
     self.assertTrue(elapsed2 >= NTASKS * TIME_PER_TEST)
     self.assertTrue(finished <= 2 * TIME_OVERHEAD)
     self.assertEqual(result1, result2)
     self.assertEqual(result1, items)
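The helper _wait and the constants TIME_PER_TEST and TIME_OVERHEAD are defined elsewhere in the test module and are not shown here. A minimal sketch of what they presumably look like, inferred from how the assertions use them (the values below are placeholders, not the project's real settings):

import time

TIME_PER_TEST = 0.4  # placeholder: seconds each task sleeps
TIME_OVERHEAD = 0.2  # placeholder: slack allowed for pool start-up and bookkeeping


def _wait(x):
    # Sleep a fixed interval and echo the input, so the mapped result
    # equals the input list and the elapsed times are predictable.
    time.sleep(TIME_PER_TEST)
    return x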
Example #3
 def test_map_async(self):
     NUM_TASKS = 6
     NCPU = 6
     items = range(NUM_TASKS)
     mytime = time.time()
     pfalse = parmap.map_async(_wait, items, pm_parallel=False).get()
     elapsed_false = time.time() - mytime
     mytime = time.time()
     with parmap.map_async(_wait, items, pm_processes=NCPU) as ptrue:
         elap_true_async = time.time() - mytime
         mytime = time.time()
         ptrue_result = ptrue.get()
         elap_true_get = time.time() - mytime
     noparmap = list(items)
     self.assertEqual(pfalse, ptrue_result)
     self.assertEqual(pfalse, noparmap)
     self.assertTrue(elapsed_false > TIME_PER_TEST * (NUM_TASKS - 1))
     self.assertTrue(elap_true_async < TIME_OVERHEAD)
     self.assertTrue(elap_true_get < TIME_PER_TEST * (NUM_TASKS - 1))
Example #4
 def test_map_async_started_simultaneously_timings(self):
     items = list(range(4))
     mytime0 = time.time()
     # These are started in parallel:
     with parmap.map_async(_wait, items, pm_processes=4) as compute1:
         elapsed1 = time.time() - mytime0
         mytime = time.time()
         with parmap.map_async(_wait, items, pm_processes=4) as compute2:
             elapsed2 = time.time() - mytime
             mytime = time.time()
             result1 = compute1.get()
             elapsed3 = time.time() - mytime0
             mytime = time.time()
             result2 = compute2.get()
             elapsed4 = time.time() - mytime0
     self.assertTrue(elapsed1 < TIME_OVERHEAD)
     self.assertTrue(elapsed2 < TIME_OVERHEAD)
     self.assertTrue(elapsed3 < 4 * TIME_PER_TEST + 2 * TIME_OVERHEAD)
     self.assertTrue(elapsed4 < 4 * TIME_PER_TEST + 2 * TIME_OVERHEAD)
     self.assertEqual(result1, result2)
     self.assertEqual(result1, items)
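The same context-manager pattern applies outside a test: map_async returns immediately, other work can run while the pool computes, and get() blocks only until the tasks finish. A minimal sketch of that usage (the worker function and sizes are placeholders):

import parmap


def _double(x):
    # Placeholder worker; any picklable module-level function works here.
    return 2 * x


if __name__ == "__main__":
    items = list(range(100))
    with parmap.map_async(_double, items, pm_processes=4) as pending:
        # ... unrelated work can happen here while the pool computes ...
        doubled = pending.get()  # blocks until all tasks have finished
    print(doubled[:5])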
Example #5
async def ingest_get(file,
                     notes=None,
                     solution_id=None,
                     type=None):  # noqa: E501
    """Get network accessible file and execute ingestion

    Get network accessible file and execute ingestion # noqa: E501

    :param file: Location of a network accessible (file, ftp, http, https) file, e.g. 'file:///usr/src/app/data/eTUFF-sailfish-117259.txt'.
    :type file: str
    :param notes: Free-form text field where details of the submitted eTUFF file can be provided, e.g. submitter name, eTUFF data contents (tag metadata and measurements + primary position data, or just secondary solution-positional meta/data)
    :type notes: str
    :param solution_id: Unique numeric identifier for a given tag geolocation dataset solution. solution_id=1 is assigned to the primary or approved solution. Incremented solution_id's are assigned to other positional dataset solutions for a given tag_id and submission_id
    :type solution_id: int
    :param type: Type of file to be ingested, defaults to 'etuff'
    :type type: str

    :rtype: Union[Ingest200, Tuple[Ingest200, int], Tuple[Ingest200, int, Dict[str, str]]]
    """
    start = time.perf_counter()
    data_file = process_get_input_data(file)
    etuff_files = []
    if not data_file.endswith(".txt"):
        etuff_files = unpack_compressed_binary(data_file)
    else:
        etuff_files.append(data_file)
    logger.info("etuff ingestion queue: %s", etuff_files)
    # If synchronous ingestion is desired, use parmap.map instead.
    # The AsyncResult is intentionally not awaited: ingestion continues in the
    # background while the HTTP response below is returned immediately.
    result = parmap.map_async(
        process_etuff_file,
        etuff_files,
        solution_id=solution_id,
        notes=notes,
        pm_parallel=True,
        pm_processes=cpu_count(),
    )
    finish = time.perf_counter()
    elapsed = round(finish - start, 2)
    return Ingest200.from_dict({
        "code": "200",
        "elapsed": elapsed,
        "message": "Asynchronously ingesting %s file(s) into Tagbase DB."
        % len(etuff_files),
    })
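As the comment in the handler notes, parmap.map is the synchronous counterpart. A minimal sketch of that variant, written as a drop-in replacement for the map_async call above and reusing the same local names (so per-file results are available before the response is built):

    # Synchronous variant (sketch): parmap.map blocks until every file has
    # been processed, so the per-file results can be inspected before responding.
    results = parmap.map(
        process_etuff_file,
        etuff_files,
        solution_id=solution_id,
        notes=notes,
        pm_parallel=True,
        pm_processes=cpu_count(),
    )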