Example #1
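This snippet sorts sites that fail quality checks into per-exit-code lists, writes each site's data out, and can fan the per-site processing out across a multiprocessing pool.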
                elif exit_code == 'nokeymeta':
                    exit_nokeymeta_refs.append(valid_refs[c])
                    exit_nokeymeta_lats.append(str(meta[0]))
                    exit_nokeymeta_lons.append(str(meta[1]))
                    exit_nokeymeta_pg.append(process_group)
                elif exit_code == 'resolution':
                    exit_resolution_refs.append(valid_refs[c])
                    exit_resolution_lats.append(str(meta[0]))
                    exit_resolution_lons.append(str(meta[1]))
                    exit_resolution_pg.append(process_group)
                elif exit_code == 'badmeasurementmethod':
                    exit_badmeasurementmethod_refs.append(valid_refs[c])
                    exit_badmeasurementmethod_lats.append(str(meta[0]))
                    exit_badmeasurementmethod_lons.append(str(meta[1]))
                    exit_badmeasurementmethod_pg.append(process_group)

        print(valid_refs[c])
        modules.write_out_data(valid_refs[c], process_group, root_grp, species,
                               full_data, p_st_grid, p_mm_grid, data_valid,
                               meta, n_dup)

elif run_type == 'parallel':
    if __name__ == '__main__':
        pool = multiprocessing.Pool(processes=16)
        results = [
            pool.apply_async(site_iter_process, (valid_refs, c))
            for c in range(len(valid_refs))
        ]
        big_array = [r.get() for r in results]
        pool.terminate()
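        # A minimal sketch (not in the original) of the same fan-out using
        # Pool as a context manager (Python 3.3+), which guarantees the
        # workers are torn down even if a task raises:
        #
        #     with multiprocessing.Pool(processes=16) as pool:
        #         results = [pool.apply_async(site_iter_process, (valid_refs, c))
        #                    for c in range(len(valid_refs))]
        #         big_array = [r.get() for r in results]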

    indices_array = []
    full_data_array = []
    p_st_grid_array = []
Example #2
if run_type == "serial":
    for c in range(len(valid_refs)):
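        # site_iter_process returns the site index together with every
        # per-site field that write_out_data needs below.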
        c, full_data, data_valid, lat, lon, alt, raw_class_name, anthrome_class_name, mm, st, file_res = site_iter_process(
            valid_refs, c
        )
        modules.write_out_data(
            valid_refs[c],
            process_group,
            root_grp,
            species,
            full_data,
            output_res,
            lat,
            lon,
            alt,
            raw_class_name,
            anthrome_class_name,
            mm,
            st,
            file_res,
            output_res_times,
            obs_time_pd,
            data_valid,
        )

elif run_type == "parallel":
    if __name__ == "__main__":
        pool = multiprocessing.Pool(processes=16)
        results = [
            pool.apply_async(site_iter_process, (valid_refs, c))
            for c in range(len(valid_refs))
        ]
        big_array = [r.get() for r in results]
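        # As in Example #1, the pool would normally be terminated (or
        # close()d and join()ed) once all results have been collected.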
Example #3
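This snippet assembles per-site metadata and sampling-type arrays, writes each record out, and accumulates quality-control and observation counts across chunk files.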
        p_unit = chunk_group.processed_units
        file_res = chunk_group.native_resolution
        flask_flag = chunk_group.flask_flag

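        # Build the per-observation sampling-type array: for flask data the
        # final entry is marked with code 3 (a project convention); all other
        # entries default to -1.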
        if flask_flag == 'Yes':
            all_st = [-1] * (len(data) - 1)
            all_st.append(3)
        else:
            all_st = [-1] * (len(data))

        meta = [
            lat, lon, alt, raw_class_name, file_res, unit, p_unit, data_tz,
            local_tz, site_name, country, contact
        ]

        modules.write_out_data(site_ref, 'EPA AQS', root_grp, species, data,
                               all_st, all_mm, True, meta, n_dup)

    # add counts from each file
    alli1 += chunk_read.variables['invalid_nometa_count'][0]
    alli2 += chunk_read.variables['invalid_anyvaliddata_count'][0]
    alli3 += chunk_read.variables['invalid_nokeymeta_count'][0]
    alli4 += chunk_read.variables['invalid_resolution_count'][0]
    alli5 += chunk_read.variables['invalid_badmeasurementmethod_count'][0]

    alln1 += chunk_read.variables['n_obs_all'][0]
    alln2 += chunk_read.variables['n_obs_after_nometa'][0]
    alln3 += chunk_read.variables['n_obs_after_flagsandlod'][0]
    alln4 += chunk_read.variables['n_obs_after_duplicate'][0]
    alln5 += chunk_read.variables['n_obs_after_anyvaliddata'][0]
    alln6 += chunk_read.variables['n_obs_after_nokeymeta'][0]