Example 1
def cluster(a_file, output_path, outfile, params, logger, min_points=1, **kwargs):
    """
    There is no intermediate ASCII output or temporary file for this code, since all data remains as native Python objects.
    
    """
    # Note: the passed-in logger argument is replaced here by the autorun logger.
    logger = logging.getLogger('FlashAutorunLogger')
    
    
    if 'mask_length' in params:
        mask_length = params['mask_length']
    else:
        mask_length = 4
    
    lma = LMAdataFile(a_file, mask_length=mask_length)
    # for line in lma.header:
        # print line

    ctr_lat, ctr_lon, ctr_alt =  params['ctr_lat'], params['ctr_lon'], 0.0

    good = (lma.stations >= params['stations'][0]) & (lma.chi2 <= params['chi2'][1]) 
    if 'alt' in params:
        good = good & (lma.alt < params['alt'][1])
    
    
    data = lma.data[good]
    geoCS = GeographicSystem()
    X,Y,Z = geoCS.toECEF(data['lon'], data['lat'], data['alt'])
    Xc, Yc, Zc = geoCS.toECEF( ctr_lon, ctr_lat, ctr_alt)
    X, Y, Z = X-Xc, Y-Yc, Z-Zc
    
    print "sorting {0} total points".format(data.shape[0])

    D_max, t_max = params['distance'], params['thresh_critical_time'] # m, s
    duration_max = params['thresh_duration']

    IDs = np.arange(X.shape[0])
    X_vector = np.hstack((X[:,None],Y[:,None],Z[:,None])) / D_max
    T_vector = data['time'][:,None] / t_max
    XYZT = np.hstack((X_vector, T_vector))
    
    lma.sort_status = 'in process'
    
    # Maximum flash duration (thresh_duration), normalized to the time separation scale t_max

    flash_object_maker = create_flash_objs(lma, data)
    label_aggregator = aggregate_ids(flash_object_maker)
    clusterer = cluster_chunk_pairs(label_aggregator, min_points=min_points)
    chunker = chunk(XYZT[:,-1].min(), duration_max/t_max, clusterer)
    stream(XYZT.astype('float64'), IDs, chunker)
    flash_object_maker.close()
    
    # These are handled by target.close in each coroutine's GeneratorExit handler
    # clusterer.close()
    # label_aggregator.close()
    # flash_object_maker.close()
    
    print(lma.sort_status)
    print(len(lma.flash_objects))
                    
    return lma, lma.flash_objects
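The function above assumes numpy (as np), logging, and the LMAdataFile, GeographicSystem, and coroutine-pipeline helpers from lmatools are already imported in the enclosing module. A minimal, hypothetical invocation sketch follows: the params keys mirror the lookups in the function body, while the file name, output arguments, and numeric values are placeholders, not recommended settings.

params = {
    'stations': (6, 99),           # only the lower bound [0] is read above
    'chi2': (0.0, 2.0),            # only the upper bound [1] is read above
    'alt': (0.0, 20.0e3),          # m; only the upper bound [1] is read above
    'ctr_lat': 35.28,              # deg, analysis center latitude
    'ctr_lon': -97.92,             # deg, analysis center longitude
    'distance': 3.0e3,             # D_max, m
    'thresh_critical_time': 0.15,  # t_max, s
    'thresh_duration': 3.0,        # maximum flash duration, s
    'mask_length': 4,              # forwarded to LMAdataFile
}
logger = logging.getLogger('FlashAutorunLogger')
lma, flashes = cluster('LYLOUT_sample.dat.gz', './', 'flashes_out',
                       params, logger, min_points=10)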
Example 2
def cluster(a_file, output_path, outfile, params, logger, min_points=1, **kwargs):
    """
    There is no intermediate ASCII output or temporary file for this code, since all data remains as native Python objects.
    
    """
    logger = logging.getLogger('FlashAutorunLogger')
    
    
    if 'mask_length' in params:
        mask_length = params['mask_length']
    else:
        mask_length = 4
    
    lma = LMAdataFile(a_file, mask_length=mask_length)
    # for line in lma.header:
        # print line

    ctr_lat, ctr_lon, ctr_alt =  params['ctr_lat'], params['ctr_lon'], 0.0

    good = (lma.stations >= params['stations'][0]) & (lma.chi2 <= params['chi2'][1]) 
    if 'alt' in params:
        good = good & (lma.alt < params['alt'][1])
    
    
    data = lma.data[good]
    geoCS = GeographicSystem()
    X,Y,Z = geoCS.toECEF(data['lon'], data['lat'], data['alt'])
    Xc, Yc, Zc = geoCS.toECEF( ctr_lon, ctr_lat, ctr_alt)
    X, Y, Z = X-Xc, Y-Yc, Z-Zc
    
    print "sorting {0} total points".format(data.shape[0])

    D_max, t_max = 3.0e3, 0.15 # m, s

    IDs = np.arange(X.shape[0])
    X_vector = np.hstack((X[:,None],Y[:,None],Z[:,None])) / D_max
    T_vector = data['time'][:,None] / t_max
    XYZT = np.hstack((X_vector, T_vector))
    
    lma.sort_status = 'in process'
    
    # Maximum 3 s flash length, normalized to the time separation scale

    flash_object_maker = create_flash_objs(lma, data)
    label_aggregator = aggregate_ids(flash_object_maker)
    clusterer = cluster_chunk_pairs(label_aggregator, min_points=min_points)
    chunker = chunk(XYZT[:,-1].min(), 3.0/0.15, clusterer)
    stream(XYZT.astype('float64'), IDs, chunker)
    flash_object_maker.close()
    
    # These are handled by target.close in each coroutine's GeneratorExit handler
    # clusterer.close()
    # label_aggregator.close()
    # flash_object_maker.close()
    
    print(lma.sort_status)
    print(len(lma.flash_objects))
                    
    return lma, lma.flash_objects
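Example 2 differs from Example 1 only in hard-coding the clustering scales instead of reading them from params. The short sketch below, with illustrative numbers, spells out what the normalization does: dividing by D_max and t_max puts space and time on a common dimensionless footing, and 3.0/0.15 is the 3 s maximum flash duration expressed in units of the critical time, as passed to chunk().

D_max, t_max = 3.0e3, 0.15     # m, s; the hard-coded scales above
dx, dt = 1.5e3, 0.05           # a 1.5 km, 50 ms separation (illustrative)
print(dx / D_max, dt / t_max)  # 0.5 0.333... -> both well under one scale unit
print(3.0 / t_max)             # 20.0 normalized time units per chunk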
Example 3
def cluster(a_file, output_path, outfile, params, logger, min_points=1, **kwargs):
    """
    There is no intermediate ASCII output or temporary file for this code, since all data remains as native Python objects.
    
    """
    logger = logging.getLogger('FlashAutorunLogger')
    
    lma = LMAdataFile(a_file)
    # for line in lma.header:
        # print line

    ctr_lat, ctr_lon, ctr_alt =  params['ctr_lat'], params['ctr_lon'], 0.0

    good = (lma.stations >= params['stations'][0]) & (lma.chi2 <= params['chi2'][1]) 
    if 'alt' in params:
        good = good & (lma.alt < params['alt'][1])
    
    
    data = lma.data[good]
    geoCS = GeographicSystem()
    X,Y,Z = geoCS.toECEF(data['lon'], data['lat'], data['alt'])
    Xc, Yc, Zc = geoCS.toECEF( ctr_lon, ctr_lat, ctr_alt)
    X, Y, Z = X-Xc, Y-Yc, Z-Zc
    
    print("sorting {0} total points".format(data.shape[0]))

    D_max, t_max = params['distance'], params['thresh_critical_time'] # m, s
    duration_max = params['thresh_duration']

    IDs = np.arange(X.shape[0])
    X_vector = np.hstack((X[:,None],Y[:,None],Z[:,None])) / D_max
    T_vector = data['time'][:,None] / t_max
    XYZT = np.hstack((X_vector, T_vector))
    

    
    # Maximum flash duration (thresh_duration), normalized to the time separation scale t_max

    flash_object_maker = create_flash_objs(lma, data)
    label_aggregator = aggregate_ids(flash_object_maker)
    clusterer = cluster_chunk_pairs(label_aggregator, min_points=min_points)
    if XYZT.shape[0] < 1:
        # No data: take zero as the start time. Nothing is done with an empty
        # stream, so the value doesn't matter, and no flashes can result.
        chunker = chunk(0, duration_max/t_max, clusterer)
    else:
        chunker = chunk(XYZT[:,-1].min(), duration_max/t_max, clusterer)
    stream(XYZT.astype('float64'), IDs, chunker)
    flash_object_maker.close()
    
    # These are handled by target.close in each coroutine's GeneratorExit handler
    # clusterer.close()
    # label_aggregator.close()
    # flash_object_maker.close()
    
    print(len(lma.flash_objects))
                    
    return lma, lma.flash_objects
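Examples 1 through 3 wire the processing stages together as a chain of coroutines, and the remark about target.close in each GeneratorExit handler refers to that pattern. The sketch below is a self-contained illustration of the pattern only; the real chunk, cluster_chunk_pairs, aggregate_ids, and create_flash_objs coroutines live in lmatools, and doubler/printer here are illustrative stand-ins.

def coroutine(func):
    # Prime a generator so it is ready to accept send() immediately.
    def start(*args, **kwargs):
        g = func(*args, **kwargs)
        next(g)
        return g
    return start

@coroutine
def printer():
    try:
        while True:
            item = (yield)
            print('got', item)
    except GeneratorExit:
        print('printer closing')

@coroutine
def doubler(target):
    try:
        while True:
            item = (yield)
            target.send(2 * item)
    except GeneratorExit:
        # Closing the head of the pipeline closes its target too, which is
        # what the commented-out close() calls in the examples rely on.
        target.close()

pipe = doubler(printer())
for x in (1, 2, 3):
    pipe.send(x)
pipe.close()   # triggers GeneratorExit in doubler, which closes printer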
Example 4
    # Tail of a coroutine whose enclosing definition was cut off in this
    # excerpt: on close(), the GeneratorExit handler reports a running total.
    except GeneratorExit:
        print(total)


#lma=LMAdataFile('/Users/ebruning/Documents/Lightning\ interpretation/Flash-length/Thomas/LYLOUT_120412_01817.exported.dat.gz')
#ctr_lat, ctr_lon, ctr_alt =  40.4463980, -104.6368130, 1000.00

lma = LMAdataFile('/data/20040526/LMA/LYLOUT_040526_224000_0600.dat.gz')
# for line in lma.header:
# print line

ctr_lat, ctr_lon, ctr_alt = 35.2791257, -97.9178678, 417.90  # OKLMA
#ctr_lat, ctr_lon, ctr_alt =  40.4463980, -104.6368130, 1000.00 # COLMA
good = (lma.stations >= 6) & (lma.chi2 <= 2.0) & (lma.alt < 20e3)
data = lma.data[good]
geoCS = GeographicSystem()
X, Y, Z = geoCS.toECEF(data['lon'], data['lat'], data['alt'])
Xc, Yc, Zc = geoCS.toECEF(ctr_lon, ctr_lat, ctr_alt)
X, Y, Z = X - Xc, Y - Yc, Z - Zc

D_max, t_max = 3.0e3, 0.15  # m, s

X_vector = np.hstack((X[:, None], Y[:, None], Z[:, None])) / D_max
T_vector = data['time'][:, None] / t_max
XYZT = np.hstack((X_vector, T_vector - T_vector.min()))

# Maximum 3 s flash length, normalized to the time separation scale
chunker = chunk(XYZT[:, -1].min(), 3.0 / .15,
                cluster_chunk_pairs(cluster_printer()))
stream(XYZT.astype('float32'), chunker)
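The good mask above combines three per-source criteria with elementwise boolean logic. The snippet below reproduces that filtering pattern on a small synthetic structured array so it runs without an LMA data file; the field names echo the criteria but are not the LMAdataFile attribute API.

import numpy as np

pts = np.array([(6, 1.2, 8.0e3), (4, 0.8, 9.0e3), (7, 3.5, 5.0e3)],
               dtype=[('stations', 'i4'), ('chi2', 'f4'), ('alt', 'f4')])
good = (pts['stations'] >= 6) & (pts['chi2'] <= 2.0) & (pts['alt'] < 20e3)
print(pts[good])   # only the first source satisfies all three criteria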
Example 5



#lma=LMAdataFile('/Users/ebruning/Documents/Lightning\ interpretation/Flash-length/Thomas/LYLOUT_120412_01817.exported.dat.gz')
#ctr_lat, ctr_lon, ctr_alt =  40.4463980, -104.6368130, 1000.00

lma = LMAdataFile('/data/20040526/LMA/LYLOUT_040526_224000_0600.dat.gz')
# for line in lma.header:
    # print line

ctr_lat, ctr_lon, ctr_alt =  35.2791257, -97.9178678, 417.90 # OKLMA
#ctr_lat, ctr_lon, ctr_alt =  40.4463980, -104.6368130, 1000.00 # COLMA
good = (lma.stations >= 6) & (lma.chi2 <= 2.0) & (lma.alt < 20e3)
data = lma.data[good]
geoCS = GeographicSystem()
X,Y,Z = geoCS.toECEF(data['lon'], data['lat'], data['alt'])
Xc, Yc, Zc = geoCS.toECEF( ctr_lon, ctr_lat, ctr_alt)
X, Y, Z = X-Xc, Y-Yc, Z-Zc


D_max, t_max = 3.0e3, 0.15 # m, s

X_vector = np.hstack((X[:,None],Y[:,None],Z[:,None])) / D_max
T_vector = data['time'][:,None] / t_max
XYZT = np.hstack((X_vector, T_vector-T_vector.min()))

# Maximum 3 s flash length, normalized to the time separation scale
chunker = chunk(XYZT[:,-1].min(), 3.0/0.15, cluster_chunk_pairs(cluster_printer()))
stream(XYZT.astype('float32'), chunker)
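As a compact check of the XYZT assembly used throughout these examples, the sketch below builds the normalized array from two synthetic sources. After dividing by D_max and t_max, a unit step along any axis of XYZT corresponds to D_max in space or t_max in time, which is what the space-time clustering relies on.

import numpy as np

D_max, t_max = 3.0e3, 0.15
X = np.array([0.0, 1.5e3]); Y = np.zeros(2); Z = np.zeros(2)   # m, centered
t = np.array([10.00, 10.05])                                   # s
XYZT = np.hstack((np.hstack((X[:, None], Y[:, None], Z[:, None])) / D_max,
                  (t[:, None] - t.min()) / t_max))
print(XYZT)   # second source: 0.5 units away in x, 1/3 of a unit later in time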