Example #1
import multiprocessing as mp

def main_generate_parallel(stream_name, ref_date, session):
    # cdr_etl and process_thread_parallel are defined elsewhere in this module
    cdrProcessor = cdr_etl(stream_name, ref_date, session)
    files = cdrProcessor.distribute()
    # append the processor instance and the thread number to each thread's file list
    for i in range(len(files)):
        files[i].append(cdrProcessor)
        files[i].append(i)
    p = mp.Pool(cdrProcessor.stream.no_threads)
    result = p.map_async(process_thread_parallel, files)
    result.get()  # block until every worker process has finished
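For map_async to work, process_thread_parallel has to be a module-level function that multiprocessing can pickle; it receives one entry of files, i.e. a thread's file list with the processor instance and thread index appended at the end. A minimal sketch under that assumption (delegating to the processor's process_thread, as in Example #3, is itself an assumption about the real worker):

def process_thread_parallel(args):
    # args is one element of files: [file_1, ..., file_n, cdrProcessor, thread_no]
    cdr_processor = args[-2]
    thread_no = args[-1]
    thread_files = args[:-2]
    # delegate to the processor's per-thread routine (assumed to mirror Example #3)
    return cdr_processor.process_thread(thread_files, thread_no)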
Example #2
def main_generate_parrallel(stream_name, ref_date, session_id):
    cdrProcessor = cdr_etl(stream_name, ref_date, session_id)
    files = cdrProcessor.distribute()
    # append the class and thread no to the files for each thread
    for i in range(len(files)):
        files[i].append(cdrProcessor)
        files[i].append(i)
    p = mp.Pool(cdrProcessor.stream.no_threads)
    result = p.map_async(process_thread_parallel, files)
    result.get()  # block until every worker process has finished

    # persist metadata for every generated file
    for i in range(len(files)):
        generated_files = files[i][:-2]  # drop the processor instance and thread index appended above
        for k in generated_files:
            cdr_file = FileStatu(file_name=k.filename, stream_id=k.stream_id, ref_date=ref_date,
                                 record_count=k.no_records, generate_session_id=k.gen_session_id,
                                 size=k.file_size
                                 # @TODO: convert the incoming date to the MySQL format: file_created_time=k.created_date
                                 )
            db.session.add(cdr_file)
        db.session.commit()
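The persistence loop assumes a FileStatu model exposed through Flask-SQLAlchemy's db.session. The columns below are only inferred from the keyword arguments used above, so treat this as a sketch rather than the actual schema:

class FileStatu(db.Model):
    # columns inferred from the keyword arguments used in Example #2
    id = db.Column(db.Integer, primary_key=True)
    file_name = db.Column(db.String(255))
    stream_id = db.Column(db.Integer)
    ref_date = db.Column(db.Date)
    record_count = db.Column(db.Integer)
    generate_session_id = db.Column(db.Integer)
    size = db.Column(db.Integer)
    file_created_time = db.Column(db.DateTime)  # still a @TODO in the caller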
Example #3
def main_generate(stream_name, ref_date):
    # sequential variant: process each thread's file list in the current process
    cdrProcessor = cdr_etl(stream_name, ref_date)
    files = cdrProcessor.distribute()
    for i in range(len(files)):
        cdrProcessor.process_thread(files[i], i)
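Both entry points are invoked the same way; only the parallel variant takes a session id, and with multiprocessing the call should sit under a __main__ guard. A hypothetical invocation (stream name, date format, and session id are made-up values, not from the source):

if __name__ == '__main__':
    # hypothetical arguments; real values come from the stream configuration
    main_generate_parallel('voice_cdr', '2024-01-31', session=42)   # multi-process run
    main_generate('voice_cdr', '2024-01-31')                        # single-process run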