# Example 1 (0)
def distpy_run(fname, configOuterFile, keyword):
    """Dispatch one distpy processing step selected by ``keyword``.

    Supported keywords: 'strainrate2summary', 'plotgenerator',
    'segy_ingest', 'ingest_h5'. Any other keyword is a no-op.

    Parameters
    ----------
    fname : input data file handed to the selected processor.
    configOuterFile : path to the outer (system) configuration file.
    keyword : name of the processing step to run.
    """
    # System-wide settings: directories, parallelism, box size, axis files.
    (basedir, dirout, jsonConfig, PARALLEL, NCPU, BOX_SIZE,
     xaxisfile, taxisfile, prf) = io_helpers.systemConfig(configOuterFile)
    ds.makedir(dirout)

    fresh_list = []

    # The x-axis is optional: fall back to None when the file is absent.
    try:
        xaxis = numpy.load(xaxisfile)
    except FileNotFoundError:
        xaxis = None
        print('No x-axis specified')

    # Built-in default config; replaced wholesale when a JSON config exists.
    config = {'BOX_SIZE': 500}
    if jsonConfig is not None:
        reader = io_helpers.json_io(jsonConfig, 1)
        config = reader.read()
        # Note this copy - we don't (from a user point-of-view)
        # want to be doing system configuration in the strainrate2summary.json,
        # but we do from an internal point-of-view
        config['BOX_SIZE'] = BOX_SIZE

    # Currently support ingestion of SEGY & HDF5 plus processing from strainrate2summary
    if keyword == 'strainrate2summary':
        strainrate2summary(fname, xaxis, prf, dirout, config,
                           copy.deepcopy(fresh_list))
    elif keyword == 'plotgenerator':
        plotgenerator(basedir, dirout, config)
    elif keyword == 'segy_ingest':
        sgy.SEGYingest(fname, dirout)
    elif keyword == 'ingest_h5':
        h5_helpers.ingest_h5(fname, dirout)
# Example 2 (0)
def main(configOuterFile):
    """Convert every file found under the configured base directory via csv2fbe.

    Reads the outer system configuration, walks ``basedir`` recursively,
    and runs ``io_helpers.csv2fbe`` on each file, either serially or with
    a multiprocessing pool of ``NCPU`` workers.

    Parameters
    ----------
    configOuterFile : path to the outer (system) configuration file.
    """
    # standard configuration information
    basedir, dirout, jsonConfig, PARALLEL, NCPU, BOX_SIZE, xaxisfile, taxisfile, prf = io_helpers.systemConfig(
        configOuterFile)
    ds.makedir(dirout)

    configData = {}
    if jsonConfig is not None:
        configFile = io_helpers.json_io(jsonConfig, 1)
        configData = configFile.read()

    # scan for directories
    # NOTE(review): no extension filter, so every file under basedir is
    # treated as input - confirm basedir only contains data files.
    csv_files = []
    for root, dirs, files in os.walk(basedir):
        for datafile in files:
            csv_files.append(os.path.join(root, datafile))

    if not PARALLEL:
        # parallel does not work in Techlog...
        for fname in csv_files:
            print(fname)
            io_helpers.csv2fbe(fname, dirout, configData)
    else:
        # NOTE(review): this manager/queue pair is never consumed by any
        # worker; kept as-is for compatibility with the rest of the codebase.
        manager = multiprocessing.Manager()
        q = manager.Queue()
        print(multiprocessing.cpu_count())

        # maxtasksperchild=1 recycles each worker after one job, guarding
        # against memory growth across a long batch.
        pool = multiprocessing.Pool(processes=NCPU, maxtasksperchild=1)

        jobs = []
        for fname in csv_files:
            job = pool.apply_async(io_helpers.csv2fbe,
                                   [fname, dirout, configData])
            print(job)
            jobs.append(job)

        # Block until every job completes (get() re-raises worker exceptions).
        for job in jobs:
            job.get()
            print(job)

        q.put('kill')
        pool.close()
        # Fix: join the pool so worker processes are reaped before returning.
        pool.join()
# Example 3 (0)
def main(configOuterFile):
    """Drive plotgenerator over the configured base directory.

    Serially this calls ``plotgenerator`` once on ``basedir``; in parallel
    mode it submits one job per top-level data file.

    Parameters
    ----------
    configOuterFile : path to the outer (system) configuration file.
    """
    basedir,dirout,jsonConfig,PARALLEL,NCPU,BOX_SIZE, xaxisfile,taxisfile,prf = io_helpers.systemConfig(configOuterFile)

    # x-axis is required here: a missing file raises FileNotFoundError.
    xaxis = numpy.load(xaxisfile)

    configFile = io_helpers.json_io(jsonConfig,1)
    configData = configFile.read()

    #scan for directories
    datafiles=[]
    for root, dirs, files in os.walk(basedir):
        for datafile in files:
            if not datafile=='measured_depth.npy':
                datafiles.append(os.path.join(root,datafile))
        # break here because we don't want subdirectories (SLB case)
        break

    if not PARALLEL:
        # parallel does not work in Techlog...
        # NOTE(review): the serial path runs plotgenerator once on basedir
        # while the parallel path runs it per datafile - confirm whether
        # this asymmetry is intended.
        plotgenerator(basedir, dirout, configData)
    else:
        # NOTE(review): this manager/queue pair is never consumed by any
        # worker; kept as-is for compatibility.
        manager = multiprocessing.Manager()
        q = manager.Queue()
        print(multiprocessing.cpu_count())

        pool = multiprocessing.Pool(processes=NCPU, maxtasksperchild=1)

        jobs = []
        for datafile in datafiles:
            job = pool.apply_async(plotgenerator, [datafile, dirout, configData])
            print(job)
            jobs.append(job)

        # Block until every job completes (get() re-raises worker exceptions).
        for job in jobs:
            job.get()
            print(job)

        q.put('kill')
        pool.close()
        # Fix: join the pool so worker processes are reaped before returning.
        pool.join()
# Example 4 (0)
def main(configOuterFile, extended_list=None):
    """Run strainrate2summary over every top-level data file in basedir.

    Loads optional x/t axes, merges the JSON configuration with the
    system BOX_SIZE, collects data files (skipping the axis files), and
    processes them serially or with a multiprocessing pool.

    Parameters
    ----------
    configOuterFile : path to the outer (system) configuration file.
    extended_list : optional list forwarded (deep-copied per file) to
        strainrate2summary; defaults to a fresh empty list.
    """
    # Fix: mutable default argument. A literal [] default is created once
    # at definition time and shared across calls; use a None sentinel.
    if extended_list is None:
        extended_list = []

    basedir,dirout,jsonConfig,PARALLEL,NCPU,BOX_SIZE, xaxisfile,taxisfile,prf = io_helpers.systemConfig(configOuterFile)

    # Optional x-axis; processing continues without one.
    xaxis = None
    try:
        xaxis = numpy.load(xaxisfile)
    except FileNotFoundError:
        print('No x-axis specified')

    # Optional time axis; silently absent when the file is missing.
    taxis = None
    try:
        taxis = numpy.load(taxisfile)
    except FileNotFoundError:
        taxis = None

    configFile = io_helpers.json_io(jsonConfig,1)
    configData = configFile.read()
    # Note this copy - we don't (from a user point-of-view)
    # want to be doing system configuration in the strainrate2summary.json,
    # but we do from an internal point-of-view
    configData['BOX_SIZE']=BOX_SIZE
    configData['taxis'] = taxis
    verbose = configData.get('verbose',0)
    if verbose==1:
        print(configData)

    #scan for directories
    datafiles=[]
    for root, dirs, files in os.walk(basedir):
        for datafile in files:
            if not datafile=='measured_depth.npy':
                if not datafile=='time.npy':
                    datafiles.append(os.path.join(root,datafile))
        # break here because we don't want subdirectories (SLB case)
        break

    if not PARALLEL:
        # parallel does not work in Techlog...
        for datafile in datafiles:
            print(datafile)
            strainrate2summary(datafile, xaxis, prf, dirout, configData, copy.deepcopy(extended_list))
    else:
        # NOTE(review): this manager/queue pair is never consumed by any
        # worker; kept as-is for compatibility.
        manager = multiprocessing.Manager()
        q = manager.Queue()
        print(multiprocessing.cpu_count())

        pool = multiprocessing.Pool(processes=NCPU, maxtasksperchild=1)

        jobs = []
        for datafile in datafiles:
            job = pool.apply_async(strainrate2summary, [datafile, xaxis, prf,dirout, configData,copy.deepcopy(extended_list)])
            print(job)
            jobs.append(job)

        # Block until every job completes (get() re-raises worker exceptions).
        for job in jobs:
            job.get()
            print(job)

        q.put('kill')
        pool.close()
        # Fix: join the pool so worker processes are reaped before returning.
        pool.join()
# Example 5 (0)
def main(configOuterFile, extended_list=None, dataPack=None):
    """Run strainrate2summary from disk files or an in-memory data pack.

    When ``dataPack`` supplies 'data' (with 'xaxis' and 'unixtime'), each
    2-D slice of data is processed under a synthetic filename; otherwise
    the top-level files of ``basedir`` are scanned and loaded from disk.
    Work runs serially or on a multiprocessing pool of ``NCPU`` workers.

    Parameters
    ----------
    configOuterFile : path to the outer (system) configuration file.
    extended_list : optional list forwarded (deep-copied per file) to
        strainrate2summary; defaults to a fresh empty list.
    dataPack : optional dict with keys 'data', 'xaxis', 'unixtime' for the
        in-memory path; defaults to a fresh empty dict.
    """
    # Fix: mutable default arguments ([] and {}) are created once at
    # definition time and shared across calls; use None sentinels.
    if extended_list is None:
        extended_list = []
    if dataPack is None:
        dataPack = {}

    basedir, dirout, jsonConfig, PARALLEL, NCPU, BOX_SIZE, xaxisfile, taxisfile, prf = io_helpers.systemConfig(
        configOuterFile)

    # in-memory option
    data = dataPack.get('data', None)

    xaxis = None
    taxis = None
    if data is None:
        # Disk-based axes; both are optional.
        try:
            xaxis = numpy.load(xaxisfile)
        except FileNotFoundError:
            print('No x-axis specified')
        try:
            taxis = numpy.load(taxisfile)
        except FileNotFoundError:
            taxis = None
    else:
        # in-memory option: axes come from the pack, prf from the data
        # (assumes data is 3-D: block x trace x sample - TODO confirm).
        xaxis = dataPack['xaxis']
        unixtime = dataPack['unixtime']
        prf = data.shape[2]

    configFile = io_helpers.json_io(jsonConfig, 1)
    configData = configFile.read()
    # Note this copy - we don't (from a user point-of-view)
    # want to be doing system configuration in the strainrate2summary.json,
    # but we do from an internal point-of-view
    configData['BOX_SIZE'] = BOX_SIZE
    configData['taxis'] = taxis
    verbose = configData.get('verbose', 0)
    if verbose == 1:
        print(configData)

    #scan for directories
    datafiles = []
    if data is None:
        for root, dirs, files in os.walk(basedir):
            for datafile in files:
                if not datafile == 'measured_depth.npy':
                    if not datafile == 'time.npy':
                        datafiles.append(os.path.join(root, datafile))
            # break here because we don't want subdirectories (SLB case)
            break
    else:
        for a in range(data.shape[0]):
            # virtual filename
            datafiles.append(os.path.join(basedir, str(unixtime + a) + '.npy'))

    if not PARALLEL:
        # parallel does not work in Techlog...
        ii = 0
        for datafile in datafiles:
            print(datafile)
            if data is None:
                strainrate2summary(datafile, xaxis, prf, dirout, configData,
                                   copy.deepcopy(extended_list), None)
            else:
                strainrate2summary(datafile, xaxis, prf, dirout, configData,
                                   copy.deepcopy(extended_list),
                                   numpy.squeeze(data[ii, :, :]))
            ii += 1
    else:
        # NOTE(review): this manager/queue pair is never consumed by any
        # worker; kept as-is for compatibility.
        manager = multiprocessing.Manager()
        q = manager.Queue()
        print(multiprocessing.cpu_count())

        pool = multiprocessing.Pool(processes=NCPU, maxtasksperchild=1)

        jobs = []
        ii = 0
        for datafile in datafiles:
            if data is None:
                job = pool.apply_async(strainrate2summary, [
                    datafile, xaxis, prf, dirout, configData,
                    copy.deepcopy(extended_list), None
                ])
            else:
                job = pool.apply_async(strainrate2summary, [
                    datafile, xaxis, prf, dirout, configData,
                    copy.deepcopy(extended_list),
                    numpy.squeeze(data[ii, :, :])
                ])
            ii += 1
            print(job)
            jobs.append(job)

        # Block until every job completes (get() re-raises worker exceptions).
        for job in jobs:
            job.get()
            print(job)

        q.put('kill')
        pool.close()
        # Fix: join the pool so worker processes are reaped before returning.
        pool.join()