Exemplo n.º 1
0
    def test_make_single_fastq_gz_paired_fwd(self):
        """Forward-read single fastq.gz generation for paired-end read sets."""
        self.params['read-set'] = 'fwd'

        support = './support_files'
        samples = (('s1', 'SKB8.640193'), ('s2', 'SKD8.640184'))

        # One 7-tuple per sample: prefix, sample id, paired 1/2,
        # unmatched 1/2, and a trailing None.
        read_sets = [
            (prefix, sample,
             '%s/%s_paired_1.fastq.gz' % (support, prefix),
             '%s/%s_paired_2.fastq.gz' % (support, prefix),
             '%s/%s_unmatched_1.fastq.gz' % (support, prefix),
             '%s/%s_unmatched_2.fastq.gz' % (support, prefix),
             None)
            for prefix, sample in samples
        ]

        out_dir = mkdtemp()
        self._clean_up_files.append(out_dir)

        exp_out = [(prefix, sample, join(out_dir, '%s.fastq.gz' % prefix))
                   for prefix, sample in samples]

        obs_out = make_single_fastq_gz(read_sets, out_dir, False)

        self.assertEqual(exp_out, obs_out)

        # Generated files must match the pre-computed fwd references.
        for (_, _, obs_fp), prefix in zip(obs_out, ('s1', 's2')):
            self.assertTrue(fcmp(obs_fp, '%s/%s.fwd.fastq.gz' % (support,
                                                                 prefix)))
Exemplo n.º 2
0
def test_update_rapid_input_file():
    """
    Checks RAPID input file update with valid input
    """
    print("TEST 2: UPDATE NAMELIST FILE")
    rapid_manager = RAPID(
        rapid_executable_location=RAPID_EXE_PATH,
        cygwin_bin_location=CYGWIN_BIN_PATH,
        use_all_processors=True,
    )

    # Namelist parameters to be written into the copied file.
    namelist_params = dict(rapid_connect_file='rapid_connect.csv',
                           Vlat_file='m3_riv.nc',
                           riv_bas_id_file='riv_bas_id.csv',
                           k_file='k.csv',
                           x_file='x.csv',
                           Qout_file='Qout.nc')
    rapid_manager.update_parameters(**namelist_params)

    # Update a copy so the original valid namelist stays untouched.
    original_input_file = os.path.join(INPUT_DATA_PATH,
                                       "rapid_namelist_valid")
    updated_input_file = os.path.join(OUTPUT_DATA_PATH,
                                      "rapid_namelist-UPDATE")
    copy(original_input_file, updated_input_file)

    rapid_manager.update_namelist_file(updated_input_file)

    updated_input_file_solution = os.path.join(COMPARE_DATA_PATH,
                                               "rapid_namelist-UPDATE")
    ok_(fcmp(updated_input_file, updated_input_file_solution))

    remove_files(updated_input_file)
Exemplo n.º 3
0
def test_generate_rapid_input_file():
    """
    Checks RAPID input file generation with valid input
    """
    print("TEST 1: GENERATE NAMELIST FILE")
    # Routing time settings (seconds): 24 h runoff step, 15 min internal
    # routing step, 12 day total simulation, 24 h input step.
    rapid_manager = RAPID(rapid_executable_location=RAPID_EXE_PATH,
                          cygwin_bin_location=CYGWIN_BIN_PATH,
                          use_all_processors=True,
                          ZS_TauR=24 * 3600,
                          ZS_dtR=15 * 60,
                          ZS_TauM=12 * 24 * 3600,
                          ZS_dtM=24 * 3600)

    file_params = dict(rapid_connect_file='rapid_connect.csv',
                       Vlat_file='m3_riv.nc',
                       riv_bas_id_file='riv_bas_id.csv',
                       k_file='k.csv',
                       x_file='x.csv',
                       Qout_file='Qout.nc')
    rapid_manager.update_parameters(**file_params)

    generated_input_file = os.path.join(OUTPUT_DATA_PATH,
                                        "rapid_namelist-GENERATE")
    rapid_manager.generate_namelist_file(generated_input_file)

    generated_input_file_solution = os.path.join(COMPARE_DATA_PATH,
                                                 "rapid_namelist-GENERATE")
    ok_(fcmp(generated_input_file, generated_input_file_solution))

    remove_files(generated_input_file)
Exemplo n.º 4
0
def test_update_rapid_numbers_input_file():
    """
    Checks RAPID input file update with number validation
    """
    print("TEST 4: GENERATE NUMBERS FOR NAMELIST FILE")
    connect_csv = os.path.join(INPUT_DATA_PATH, 'rapid_connect.csv')
    basin_csv = os.path.join(INPUT_DATA_PATH, 'riv_bas_id.csv')
    rapid_manager = RAPID(rapid_executable_location=RAPID_EXE_PATH,
                          cygwin_bin_location=CYGWIN_BIN_PATH,
                          use_all_processors=True,
                          rapid_connect_file=connect_csv,
                          riv_bas_id_file=basin_csv)

    # Derive reach counts from the connectivity/basin id files.
    rapid_manager.update_reach_number_data()

    namelist_params = dict(rapid_connect_file='rapid_connect.csv',
                           Vlat_file='m3_nasa_lis_3hr_20020830.nc',
                           riv_bas_id_file='riv_bas_id.csv',
                           k_file='k.csv',
                           x_file='x.csv',
                           Qout_file='Qout.nc')
    rapid_manager.update_parameters(**namelist_params)

    generated_input_file = os.path.join(OUTPUT_DATA_PATH,
                                        "rapid_namelist-GENERATE-NUMBERS")
    rapid_manager.generate_namelist_file(generated_input_file)

    generated_input_file_solution = os.path.join(
        COMPARE_DATA_PATH, "rapid_namelist-GENERATE-NUMBERS")
    ok_(fcmp(generated_input_file, generated_input_file_solution))

    remove_files(generated_input_file)
Exemplo n.º 5
0
def test_update_autoroute_input_file():
    """
    Checks AutoRoute input file generation with valid input
    """
    main_tests_folder = os.path.dirname(os.path.abspath(__file__))

    original_data_path = os.path.join(main_tests_folder, 'original')
    output_data_path = os.path.join(main_tests_folder, 'output')

    # print() keeps this test consistent with the rest of the suite and is
    # valid on both Python 2 and 3 for a single argument.
    print("TEST 4: UPDATE VARIABLES IN FILE")
    auto_mng_var = AutoRoute("autoroute_exe_path_dummy",
                             stream_info_file_path="stream_info.txt",
                             dem_raster_file_path="elevation.tif",
                             out_flood_map_shapefile_path="right_here.shp",
                             )

    # Update a copy of the original input file, then compare to the solution.
    original_var_input_file = os.path.join(original_data_path,
                                           "AUTOROUTE_INPUT_FILE-UPDATE_VAR.txt")
    out_var_input_file = os.path.join(output_data_path,
                                      "AUTOROUTE_INPUT_FILE-UPDATE_VAR.txt")
    copy(original_var_input_file, out_var_input_file)
    auto_mng_var.update_input_file(out_var_input_file)
    updated_input_file_solution = os.path.join(original_data_path,
                                               "AUTOROUTE_INPUT_FILE-UPDATE_VAR-SOLUTION.txt")
    ok_(fcmp(out_var_input_file, updated_input_file_solution))

    # Best-effort cleanup: the file may not exist if the update failed.
    try:
        os.remove(out_var_input_file)
    except OSError:
        pass
Exemplo n.º 6
0
def test_append_slope_to_stream_info_file():
    """
    Checks adding slope to stream info file
    """
    main_tests_folder = os.path.dirname(os.path.abspath(__file__))

    original_data_path = os.path.join(main_tests_folder, 'original')
    output_data_path = os.path.join(main_tests_folder, 'output')

    # Operate on a copy so the original stream info file stays untouched.
    original_stream_info_file = os.path.join(original_data_path,
                                             'stream_info.txt')
    stream_info_file = os.path.join(output_data_path, 'stream_info.txt')
    copy(original_stream_info_file, stream_info_file)

    # print() keeps this test consistent with the rest of the suite and is
    # valid on both Python 2 and 3 for a single argument.
    print("TEST 2: TEST ADDING SLOPE TO STREAM INFO FILE")
    arp = AutoRoutePrepare(
        "autoroute_exe_path_dummy",
        os.path.join(original_data_path, 'elevation.asc'), stream_info_file,
        os.path.join(original_data_path, 'drainage_line.shp'))

    arp.append_slope_to_stream_info_file()

    ok_(
        fcmp(os.path.join(original_data_path, 'stream_info_solution.txt'),
             stream_info_file))

    # Best-effort cleanup.
    try:
        os.remove(stream_info_file)
    except OSError:
        pass
Exemplo n.º 7
0
def test_update_rapid_input_file():
    """
    Checks RAPID input file update with valid input
    """
    print("TEST 2: UPDATE NAMELIST FILE")
    rapid_manager = RAPID(rapid_executable_location=RAPID_EXE_PATH,
                          cygwin_bin_location=CYGWIN_BIN_PATH,
                          use_all_processors=True)

    # Parameters to be written into the copied namelist.
    namelist_params = dict(rapid_connect_file='rapid_connect.csv',
                           Vlat_file='m3_riv.nc',
                           riv_bas_id_file='riv_bas_id.csv',
                           k_file='k.csv',
                           x_file='x.csv',
                           Qout_file='Qout.nc')
    rapid_manager.update_parameters(**namelist_params)

    original_input_file = os.path.join(INPUT_DATA_PATH, "rapid_namelist_valid")
    updated_input_file = os.path.join(OUTPUT_DATA_PATH,
                                      "rapid_namelist-UPDATE")

    # Update a copy so the valid input file is left untouched.
    copy(original_input_file, updated_input_file)
    rapid_manager.update_namelist_file(updated_input_file)

    updated_input_file_solution = os.path.join(COMPARE_DATA_PATH,
                                               "rapid_namelist-UPDATE")
    assert fcmp(updated_input_file, updated_input_file_solution)

    remove_files(updated_input_file)
Exemplo n.º 8
0
def test_append_slope_to_stream_info_file():
    """
    Checks adding slope to stream info file
    """
    main_tests_folder = os.path.dirname(os.path.abspath(__file__))

    original_data_path = os.path.join(main_tests_folder, 'original')
    output_data_path = os.path.join(main_tests_folder, 'output')

    # Operate on a copy so the original stream info file stays untouched.
    original_stream_info_file = os.path.join(original_data_path, 'stream_info.txt')
    stream_info_file = os.path.join(output_data_path, 'stream_info.txt')
    copy(original_stream_info_file, stream_info_file)

    # print() form for consistency with the other tests in this collection;
    # valid on both Python 2 and 3 for a single argument.
    print("TEST 2: TEST ADDING SLOPE TO STREAM INFO FILE")
    arp = AutoRoutePrepare("autoroute_exe_path_dummy",
                           os.path.join(original_data_path, 'elevation.asc'),
                           stream_info_file,
                           os.path.join(original_data_path, 'drainage_line.shp'))

    arp.append_slope_to_stream_info_file()

    ok_(fcmp(os.path.join(original_data_path, 'stream_info_solution.txt'),
             stream_info_file))

    # Best-effort cleanup.
    try:
        os.remove(stream_info_file)
    except OSError:
        pass
Exemplo n.º 9
0
def test_update_rapid_numbers_input_file():
    """
    Checks RAPID input file update with number validation
    """
    print("TEST 4: GENERATE NUMBERS FOR NAMELIST FILE")
    rapid_manager = RAPID(
        rapid_executable_location=RAPID_EXE_PATH,
        cygwin_bin_location=CYGWIN_BIN_PATH,
        use_all_processors=True,
        rapid_connect_file=os.path.join(INPUT_DATA_PATH, 'rapid_connect.csv'),
        riv_bas_id_file=os.path.join(INPUT_DATA_PATH, 'riv_bas_id.csv'))

    # Derive reach counts from the connectivity/basin id files.
    rapid_manager.update_reach_number_data()

    namelist_params = dict(rapid_connect_file='rapid_connect.csv',
                           Vlat_file='m3_nasa_lis_3hr_20020830.nc',
                           riv_bas_id_file='riv_bas_id.csv',
                           k_file='k.csv',
                           x_file='x.csv',
                           Qout_file='Qout.nc')
    rapid_manager.update_parameters(**namelist_params)

    generated_input_file = os.path.join(OUTPUT_DATA_PATH,
                                        "rapid_namelist-GENERATE-NUMBERS")
    rapid_manager.generate_namelist_file(generated_input_file)

    generated_input_file_solution = os.path.join(
        COMPARE_DATA_PATH, "rapid_namelist-GENERATE-NUMBERS")
    assert fcmp(generated_input_file, generated_input_file_solution)

    remove_files(generated_input_file)
Exemplo n.º 10
0
def test_generate_rapid_input_file():
    """
    Checks RAPID input file generation with valid input
    """
    print("TEST 1: GENERATE NAMELIST FILE")
    # Routing time settings (seconds): 24 h runoff step, 15 min internal
    # routing step, 12 day total simulation, 24 h input step.
    rapid_manager = RAPID(rapid_executable_location=RAPID_EXE_PATH,
                          cygwin_bin_location=CYGWIN_BIN_PATH,
                          use_all_processors=True,
                          ZS_TauR=24 * 3600,
                          ZS_dtR=15 * 60,
                          ZS_TauM=12 * 24 * 3600,
                          ZS_dtM=24 * 3600)

    file_params = dict(rapid_connect_file='rapid_connect.csv',
                       Vlat_file='m3_riv.nc',
                       riv_bas_id_file='riv_bas_id.csv',
                       k_file='k.csv',
                       x_file='x.csv',
                       Qout_file='Qout.nc')
    rapid_manager.update_parameters(**file_params)

    generated_input_file = os.path.join(OUTPUT_DATA_PATH,
                                        "rapid_namelist-GENERATE")
    rapid_manager.generate_namelist_file(generated_input_file)
    generated_input_file_solution = os.path.join(COMPARE_DATA_PATH,
                                                 "rapid_namelist-GENERATE")
    assert fcmp(generated_input_file, generated_input_file_solution)

    remove_files(generated_input_file)
Exemplo n.º 11
0
def test_generate_autoroute_input_file():
    """
    Checks AutoRoute input file generation with valid input
    """
    main_tests_folder = os.path.dirname(os.path.abspath(__file__))

    original_data_path = os.path.join(main_tests_folder, 'original')
    # Fixed misspelled local name ('ouptut' -> 'output').
    output_data_path = os.path.join(main_tests_folder, 'output')

    # print() keeps this test consistent with the rest of the suite and is
    # valid on both Python 2 and 3 for a single argument.
    print("TEST 2: GENERATE INPUT FILE")
    auto_mng_gen = AutoRoute("autoroute_exe_path_dummy",
                             stream_info_file_path="stream_info.txt",
                             dem_raster_file_path="elevation.tif",
                             out_flood_map_shapefile_path="right_here.shp",
                             x_section_dist=5000.0,
                             degree_manipulation=3.1,
                             low_spot_range=15,
                             q_limit=1.01,
                             default_manning_n=0.035,
                             degree_interval=1.5)

    generated_input_file = os.path.join(output_data_path,
                                        "AUTOROUTE_INPUT_FILE-GENERATE.txt")
    auto_mng_gen.generate_input_file(generated_input_file)
    generated_input_file_solution = os.path.join(
        original_data_path, "AUTOROUTE_INPUT_FILE-GENERATE-SOLUTION.txt")
    ok_(fcmp(generated_input_file, generated_input_file_solution))

    # Best-effort cleanup.
    try:
        os.remove(generated_input_file)
    except OSError:
        pass
Exemplo n.º 12
0
def test_generate_autoroute_input_file():
    """
    Checks AutoRoute input file generation with valid input
    """
    main_tests_folder = os.path.dirname(os.path.abspath(__file__))

    original_data_path = os.path.join(main_tests_folder, 'original')
    # Fixed misspelled local name ('ouptut' -> 'output').
    output_data_path = os.path.join(main_tests_folder, 'output')

    # print() form for consistency with the other tests in this collection;
    # valid on both Python 2 and 3 for a single argument.
    print("TEST 2: GENERATE INPUT FILE")
    auto_mng_gen = AutoRoute("autoroute_exe_path_dummy",
                             stream_info_file_path="stream_info.txt",
                             dem_raster_file_path="elevation.tif",
                             out_flood_map_shapefile_path="right_here.shp",
                             x_section_dist=5000.0,
                             degree_manipulation=3.1,
                             low_spot_range=15,
                             q_limit=1.01,
                             default_manning_n=0.035,
                             degree_interval=1.5
                             )

    generated_input_file = os.path.join(output_data_path,
                                        "AUTOROUTE_INPUT_FILE-GENERATE.txt")
    auto_mng_gen.generate_input_file(generated_input_file)
    generated_input_file_solution = os.path.join(original_data_path,
                                                 "AUTOROUTE_INPUT_FILE-GENERATE-SOLUTION.txt")
    ok_(fcmp(generated_input_file, generated_input_file_solution))

    # Best-effort cleanup.
    try:
        os.remove(generated_input_file)
    except OSError:
        pass
Exemplo n.º 13
0
def test_update_autoroute_input_file():
    """
    Checks AutoRoute input file generation with valid input
    """
    main_tests_folder = os.path.dirname(os.path.abspath(__file__))

    original_data_path = os.path.join(main_tests_folder, 'original')
    output_data_path = os.path.join(main_tests_folder, 'output')

    # print() form for consistency with the other tests in this collection;
    # valid on both Python 2 and 3 for a single argument.
    print("TEST 4: UPDATE VARIABLES IN FILE")
    auto_mng_var = AutoRoute(
        "autoroute_exe_path_dummy",
        stream_info_file_path="stream_info.txt",
        dem_raster_file_path="elevation.tif",
        out_flood_map_shapefile_path="right_here.shp",
    )

    # Update a copy of the original input file, then compare to the solution.
    original_var_input_file = os.path.join(
        original_data_path, "AUTOROUTE_INPUT_FILE-UPDATE_VAR.txt")
    out_var_input_file = os.path.join(output_data_path,
                                      "AUTOROUTE_INPUT_FILE-UPDATE_VAR.txt")
    copy(original_var_input_file, out_var_input_file)
    auto_mng_var.update_input_file(out_var_input_file)
    updated_input_file_solution = os.path.join(
        original_data_path, "AUTOROUTE_INPUT_FILE-UPDATE_VAR-SOLUTION.txt")
    ok_(fcmp(out_var_input_file, updated_input_file_solution))

    # Best-effort cleanup.
    try:
        os.remove(out_var_input_file)
    except OSError:
        pass
Exemplo n.º 14
0
def main():
    """Poll BACKUP_DIR for new DB dumps and upload the newest one to S3.

    Loops forever, sleeping SLEEP_TIME seconds between iterations; exits
    on SIGTERM/SIGINT/SIGABRT via the registered signal handler (which is
    assumed to raise InterruptException -- confirm against its definition).
    """
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGABRT, signal_handler)

    # Context manager so the handle is closed (original leaked it).
    with open(BACKUPS_COPIED, "a+") as f:
        f.write("Starting up process\n")

    while True:
        try:
            log.info("Starting DB Dump to s3 copier ")
            with open("aws_config.yml") as f:
                # safe_load: the config is plain data; yaml.load without a
                # Loader is unsafe and deprecated.
                cfg = yaml.safe_load(f)
            region = cfg["region"]
            access_key = cfg["access_key"]
            secret_key = cfg["secret_key"]
            bucket_name = cfg["bucket_name"]

            # Newest dump first (by creation time).
            dumps = sorted(glob.iglob(BACKUP_DIR + "*.gz"),
                           key=os.path.getctime,
                           reverse=True)

            file_compare = False
            if dumps:
                if len(dumps) > 1:
                    file_compare = fcmp(dumps[0], dumps[1])

                if file_compare:
                    log.info(
                        "the two most recent files are identical, skipping copy. "
                    )
                else:
                    newest_dump = dumps[0]
                    # Close the ledger file deterministically.
                    with open(BACKUPS_COPIED, "r") as f:
                        copied_files = f.read().split("\n")
                    if newest_dump in copied_files:
                        log.info("skipping because already copied. ")
                    else:
                        s3 = boto3.resource('s3',
                                            region_name=region,
                                            aws_access_key_id=access_key,
                                            aws_secret_access_key=secret_key)
                        log.info("copying file " + str(newest_dump) +
                                 " to bucket " + str(bucket_name))
                        s3.meta.client.upload_file(
                            newest_dump,
                            bucket_name,
                            basename(newest_dump),
                            Callback=ProgressPercentage(newest_dump))

        except CalledProcessError as cpe:
            log.exception(cpe)
        except InterruptException:
            log.info("Received Signal interrupt")
            sys.exit(0)
        except Exception as e:
            # Original used "Caught exception".format(e) -- a no-op format
            # that silently dropped the exception text.
            log.exception("Caught exception: {}".format(e))

        log.info("Sleeping for " + str(SLEEP_TIME) + " seconds")
        sleep(SLEEP_TIME)
Exemplo n.º 15
0
def test_goodness_of_fit():
    """
    This tests the goodness of fit functions

    Runs find_goodness_of_fit against a CF-compliant Qout file (multi-reach
    and single-reach observation sets) and find_goodness_of_fit_csv against
    an observed/simulated csv, comparing each result with stored solutions.
    """
    print("TEST 14: TEST GOODNESS OF FIT FUNCTIONS")

    reach_id_file = os.path.join(INPUT_DATA_PATH, 'obs_reach_id.csv')
    observed_file = os.path.join(INPUT_DATA_PATH, 'obs_flow.csv')
    #using CF-compliant file
    cf_input_qout_file = os.path.join(COMPARE_DATA_PATH,
                                      'Qout_nasa_lis_3hr_20020830_CF.nc')
    cf_out_analysis_file = os.path.join(
        OUTPUT_DATA_PATH, 'cf_goodness_of_fit_results-daily.csv')
    find_goodness_of_fit(cf_input_qout_file,
                         reach_id_file,
                         observed_file,
                         cf_out_analysis_file,
                         daily=True)

    # decimal-tolerant comparison against the stored analysis solution
    cf_goodness_of_fit_file_solution = os.path.join(
        COMPARE_DATA_PATH, 'cf_goodness_of_fit_analysis.csv')
    assert (compare_csv_decimal_files(cf_out_analysis_file,
                                      cf_goodness_of_fit_file_solution))

    # same check again with a single observation point
    reach_id_file = os.path.join(INPUT_DATA_PATH, 'obs_reach_id_1.csv')
    observed_file = os.path.join(INPUT_DATA_PATH, 'obs_flow_1.csv')
    #using CF-compliant file single input
    cf_out_analysis_file_1 = os.path.join(
        OUTPUT_DATA_PATH, 'cf_goodness_of_fit_results_1-daily.csv')
    find_goodness_of_fit(cf_input_qout_file,
                         reach_id_file,
                         observed_file,
                         cf_out_analysis_file_1,
                         daily=True)

    cf_goodness_of_fit_file_solution_1 = os.path.join(
        COMPARE_DATA_PATH, 'cf_goodness_of_fit_analysis_1.csv')
    assert (compare_csv_decimal_files(cf_out_analysis_file_1,
                                      cf_goodness_of_fit_file_solution_1))

    # NOTE(review): the names look swapped -- '_solution' points at
    # OUTPUT_DATA_PATH (the file being generated) while 'goodness_obs_sim'
    # points at COMPARE_DATA_PATH (the stored solution). The comparison
    # below still checks generated vs stored, so behavior is correct.
    observed_simulated_file = os.path.join(COMPARE_DATA_PATH,
                                           'goodness_of_fit_obs_sim.csv')
    goodness_obs_sim_solution = os.path.join(OUTPUT_DATA_PATH,
                                             'goodness_of_fit_obs_sim.txt')
    # test print goodness of fit to file
    find_goodness_of_fit_csv(observed_simulated_file,
                             out_file=goodness_obs_sim_solution)
    goodness_obs_sim = os.path.join(COMPARE_DATA_PATH,
                                    'goodness_of_fit_obs_sim.txt')
    assert (fcmp(goodness_obs_sim, goodness_obs_sim_solution))
    # test print goodness of fit to console
    find_goodness_of_fit_csv(observed_simulated_file)

    remove_files(cf_out_analysis_file, cf_out_analysis_file_1)
Exemplo n.º 16
0
def ssu():
    """Report which known config file matches default.json.

    Compares default.json against every other file in _CONIFG_NAME and,
    on the first match, returns 'using <name>' plus the file body with
    quote/angle/ampersand characters stripped; otherwise a not-found
    message.
    """
    d = os.path.join(_CONIFG_PATH, 'default.json')
    for f in _CONIFG_NAME:
        if f == 'default.json':
            continue
        n = os.path.join(_CONIFG_PATH, f)
        if fcmp(d, n):
            # Context manager closes the handle even if read() raises
            # (the original leaked it on error).
            with open(n, 'r') as fd:
                s = 'using %s\n%s' % (f, fd.read())
            # NOTE(review): two-argument str.translate is Python 2 only.
            return s.translate(None, '\'\"<>&')
    return 'not in the folder'
Exemplo n.º 17
0
def ssu():
    """Return the contents of the config file identical to default.json."""
    default_cfg = os.path.join(_CONIFG_PATH, "default.json")
    for name in _CONIFG_NAME:
        # default.json is the reference; never compare it with itself.
        if name == "default.json":
            continue
        candidate = os.path.join(_CONIFG_PATH, name)
        if not fcmp(default_cfg, candidate):
            continue
        fd = open(candidate, "r")
        body = fd.read()
        fd.close()
        report = "using %s\n%s" % (name, body)
        # Strip quote/angle/ampersand characters (Python 2 translate form).
        return report.translate(None, "'\"<>&")
    return "not in the folder"
Exemplo n.º 18
0
def test_update_rapid_numbers_forcing_input_file():
    """
    Checks RAPID input file update with forcing data and number validation
    """
    def _input(name):
        # Shorthand for files under the test input directory.
        return os.path.join(INPUT_DATA_PATH, name)

    rapid_manager = RAPID(rapid_executable_location=RAPID_EXE_PATH,
                          cygwin_bin_location=CYGWIN_BIN_PATH,
                          use_all_processors=True,
                          rapid_connect_file=_input('rapid_connect.csv'),
                          riv_bas_id_file=_input('riv_bas_id.csv'),
                          for_tot_id_file=_input('for_tot_id.csv'),
                          for_use_id_file=_input('for_use_id.csv'),
                          ZS_dtF=3 * 60 * 60,
                          BS_opt_for=True)

    # Derive reach counts from the connectivity/basin id files.
    rapid_manager.update_reach_number_data()

    namelist_params = dict(rapid_connect_file='rapid_connect.csv',
                           Vlat_file='m3_nasa_lis_3hr_20020830.nc',
                           riv_bas_id_file='riv_bas_id.csv',
                           k_file='k.csv',
                           x_file='x.csv',
                           Qout_file='Qout.nc',
                           Qfor_file='qfor.csv',
                           for_tot_id_file='for_tot_id.csv',
                           for_use_id_file='for_use_id.csv')
    rapid_manager.update_parameters(**namelist_params)

    generated_input_file = os.path.join(
        OUTPUT_DATA_PATH, "rapid_namelist-GENERATE-NUMBERS-FORCING")
    rapid_manager.generate_namelist_file(generated_input_file)

    generated_input_file_solution = os.path.join(
        COMPARE_DATA_PATH, "rapid_namelist-GENERATE-NUMBERS-FORCING")
    assert fcmp(generated_input_file, generated_input_file_solution)

    remove_files(generated_input_file)
Exemplo n.º 19
0
def ssc(i, p='restart'):
    """Switch default.json to config number *i* and run the control shell.

    Returns the shell output on success, or a short status string when
    another switch is in progress, the index is out of range, or the
    selected config already matches default.json.
    """
    index = int(i)
    # Lock file presence means another switch is in progress.
    if os.path.exists(_SSC_LOCK):
        return 'is changing'

    if index not in range(0, len(_CONIFG_NAME)):
        return '%d beyond the scope of the file list.' %(index)

    o = os.path.join(_CONIFG_PATH, _CONIFG_NAME[index])
    n = os.path.join(_CONIFG_PATH, 'default.json')
    if fcmp(o, n):
        return '%s like %s' %(o, n)

    # Create the lock file (its existence is the lock; contents unused).
    fd = open(_SSC_LOCK, 'w')
    fd.close()

    try:
        copy(o, n)
        status, output = getstatusoutput('%s %s' %(_SHELL_PATH, p))
    finally:
        # Always release the lock, even if the copy or shell call raises
        # (the original left a stale lock behind on failure).
        os.remove(_SSC_LOCK)
    return output
Exemplo n.º 20
0
def ssc(i, p="restart"):
    """Switch default.json to config number *i* and run the control shell.

    Returns the shell output on success, or a short status string when
    another switch is in progress, the index is out of range, or the
    selected config already matches default.json.
    """
    index = int(i)
    # Lock file presence means another switch is in progress.
    if os.path.exists(_SSC_LOCK):
        return "is changing"

    if index not in range(0, len(_CONIFG_NAME)):
        return "%d beyond the scope of the file list." % (index)

    o = os.path.join(_CONIFG_PATH, _CONIFG_NAME[index])
    n = os.path.join(_CONIFG_PATH, "default.json")
    if fcmp(o, n):
        return "%s like %s" % (o, n)

    # Create the lock file (its existence is the lock; contents unused).
    fd = open(_SSC_LOCK, "w")
    fd.close()

    try:
        copy(o, n)
        status, output = getstatusoutput("%s %s" % (_SHELL_PATH, p))
    finally:
        # Always release the lock, even if the copy or shell call raises
        # (the original left a stale lock behind on failure).
        os.remove(_SSC_LOCK)
    return output
Exemplo n.º 21
0
def file_judge(code_out_path, ac_out_path):
    """Return True if the program's output file matches the accepted one.

    Uses filecmp.cmp -- the filecmp module has no ``fcmp`` attribute, so
    the original ``filecmp.fcmp`` call raised AttributeError on every use.
    """
    return filecmp.cmp(code_out_path, ac_out_path)
Exemplo n.º 22
0
    def update_directories(self):
        """run through all subfolders and update existing bpl files

        For each collection in the config's 'update_list': export a fresh
        bpl file, compare it with the existing one, and replace it only
        when they differ.  Afterwards flag bpl files with no matching
        collection and write a csv summary to bpl_update_result.csv.
        Returns the accumulated error status.
        """

        # get all collections to update
        # for each collection:
        collections = self._config.get('update_list')
        for col_name in collections:
            # print 'search for collection "%s"' % col_name
            # skip collections missing from the DB, but record the error
            try:
                _ = self.cat_db.get_collection_id(col_name)
            except AdasDBError as db_err:
                self._logger.warning(db_err)
                self.error_status = ERR_DB_COLL_MISSING
                continue
            # get directory for function
            fct_name = self.create_fct_dir(col_name)
            # create the new bpl file
            bpl_file_name_new = join(self.bpl_top_dir, fct_name,
                                     col_name + '_new.bpl')
            try:
                self.cat_db.export_bpl_for_collection(col_name,
                                                      bpl_file_name_new, True,
                                                      True)
            except AdasDBError as err:
                self._logger.error('problems writing bpl file %s:\n%s' %
                                   (bpl_file_name_new, err))
                self.error_status = ERR_BPL_FILE_CREATION
                continue
            # compare the new bpl file with an existing one (if there is one)
            bpl_file_name = join(self.bpl_top_dir, fct_name, col_name + '.bpl')
            if isfile(bpl_file_name):
                same = fcmp(bpl_file_name, bpl_file_name_new)
                if not same:
                    # content changed: make the old file writable, then swap
                    # in the freshly exported one
                    self._logger.info('update bpl file %s for collection %s' %
                                      (bpl_file_name, col_name))
                    chmod(bpl_file_name, S_IWUSR)
                    remove(bpl_file_name)
                    rename(bpl_file_name_new, bpl_file_name)
                    self.bpl_dict[col_name.lower()]['status'] = 'updated'
                else:
                    # content identical: keep the old file, drop the new one
                    self._logger.info('bpl for collection "%s" up to date' %
                                      col_name)
                    remove(bpl_file_name_new)
                    self.bpl_dict[col_name.lower()]['status'] = 'match'
            else:
                # bpl file didn't exist before
                # NOTE(review): 'filename' is recorded with a '.bsig'
                # extension while the renamed file uses '.bpl' -- confirm
                # this extension mismatch is intended
                self.bpl_dict[col_name.lower()] = {
                    'status': 'new',
                    'filename': join(self.bpl_top_dir, col_name + '.bsig')
                }
                rename(bpl_file_name_new, bpl_file_name)
                self._logger.info(
                    'created new bpl file "%s" for collection %s' %
                    (bpl_file_name, col_name))

        # check if collections are removed but bpl files exist for that collection
        # and list bpl files that have no matching collections
        all_col_names = self.cat_db.get_all_collection_names()
        for bpl_name in [
                b.lower() for b in self.bpl_dict
                if self.bpl_dict[b]['status'] == 'old'
        ]:
            bpl_file_name = relpath(self.bpl_dict[bpl_name]['filename'],
                                    self.bpl_top_dir)
            if bpl_name in all_col_names:
                # collection exists in DB but was dropped from the config
                self.bpl_dict[bpl_name]['status'] = 'rem_col?'
                self._logger.warning(
                    'collection removed from config? - file %s has matching collection "%s"'
                    % (bpl_file_name, bpl_name))
            else:
                # orphaned bpl file: no collection at all
                self.bpl_dict[bpl_name]['status'] = 'junk'
                self._logger.warning(
                    'found bpl file with no matching collection: %s' %
                    bpl_file_name)

        # create table with all bpl update results
        with open(join(self.bpl_top_dir, 'bpl_update_result.csv'),
                  'w') as res_file:
            res_file.write('collection; status; bpl file\n')
            for bpl_name in self.bpl_dict:
                res_file.write(bpl_name + '; ' +
                               self.bpl_dict[bpl_name]['status'] + '; ' +
                               relpath(self.bpl_dict[bpl_name]['filename'],
                                       self.bpl_top_dir) + '\n')

        return self.error_status