Example No. 1
    for fnm in failed:
        if fnm in names:
            new_names.append(fnm)
    names = new_names

if args.verbosity > 1:
    print("Names:", names)

if len(names) == 0:
    print "No tests to run"
    sys.exit(0)

# Make sure we have sample data
if not os.path.exists("test_data"):
    os.makedirs("test_data")
cdat_info.download_sample_data_files(
    os.path.join(sys.prefix, "share", "pmp", "test_data_files.txt"),
    "test_data")
if args.update:
    os.environ["UPDATE_TESTS"] = "True"
if args.traceback:
    os.environ["TRACEBACK"] = "True"

p = multiprocessing.Pool(args.cpus)
outs = p.map(run_nose, names)
results = {}
failed = []
for d in outs:
    results.update(d)
    nm = list(d.keys())[0]
    if d[nm]["result"] != 0:
        failed.append(nm)
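The examples in this section hand each test name to a run_nose worker through a multiprocessing.Pool, but the worker itself is never shown. A minimal sketch of such a worker follows; the call signature and the result-dict shape ({test_name: {"result": exit_code}}) are inferred from the calling code above, while the subprocess invocation is an assumption.

# Hedged sketch of a run_nose worker; only the signature and the result-dict
# shape are inferred from the snippets above, the rest is assumed.
import subprocess
import sys


def run_nose(test_name):
    # Run one test file in its own subprocess so Pool workers stay isolated.
    proc = subprocess.run([sys.executable, "-m", "nose", test_name],
                          capture_output=True, text=True)
    # The callers only inspect d[name]["result"] and treat 0 as success.
    return {test_name: {"result": proc.returncode}}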
Example No. 2
    f.close()
    new_names = []
    for fnm in failed:
        if fnm in names:
            new_names.append(fnm)
    names = new_names

if args.verbosity > 1:
    print("Names:", names)

if len(names) == 0:
    print("No tests to run")
    sys.exit(0)

# Make sure we have sample data
cdat_info.download_sample_data_files(
    os.path.join(sys.prefix, "share", "vcsaddons", "test_data_files.txt"),
    cdat_info.get_sampledata_path())
p = multiprocessing.Pool(args.cpus)
try:
    outs = p.map_async(run_nose, names).get(3600)
except KeyboardInterrupt:
    sys.exit(1)
results = {}
failed = []
for d in outs:
    results.update(d)
    nm = list(d.keys())[0]
    if d[nm]["result"] != 0:
        failed.append(nm)
with open(os.path.join("tests", ".last_failure"), "w") as f:
    f.write(repr(failed))
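The filtering loop at the top of these runners uses a failed list loaded back from the .last_failure cache written above; that loading step is not part of the snippet, so the following is a hedged sketch of one way to do it.

import ast
import os

# Hedged sketch: read back the .last_failure cache. The file holds repr() of a
# list of test names, so ast.literal_eval can parse it safely.
last_failure = os.path.join("tests", ".last_failure")
if os.path.exists(last_failure):
    with open(last_failure) as f:
        failed = ast.literal_eval(f.read())
else:
    failed = []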
Example No. 3
    import tempfile
    tmpdir = tempfile.mkdtemp()
    os.chdir(tmpdir)
    names = [os.path.join(root, t) for t in names]
    print("RUNNNIG FROM:", tmpdir)

if len(names) == 0:
    print("No tests to run")
    sys.exit(0)

if args.verbosity > 1:
    print("Names:", names)

# Make sure we have sample data
cdat_info.download_sample_data_files(
    os.path.join(distutils.sysconfig.get_python_lib(), "share", "cdms2",
                 "test_data_files.txt"), cdat_info.get_sampledata_path())

p = multiprocessing.Pool(args.cpus)
outs = p.map(run_nose, names)
results = {}
failed = []
for d in outs:
    results.update(d)
    nm = list(d.keys())[0]
    if d[nm]["result"] != 0:
        failed.append(nm)
if args.subdir:
    f = open(os.path.join(root, "tests", ".last_failure"), "w")
else:
    f = open(os.path.join("tests", ".last_failure"), "w")
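These runners also read several options from args (cpus, verbosity, update, traceback, subdir) whose parser is built before the code shown. A minimal sketch, assuming plain argparse and hypothetical flag names, is:

import argparse
import multiprocessing

# Minimal sketch, assuming argparse; the flag names here are hypothetical.
parser = argparse.ArgumentParser(description="Run the test suite with nose")
parser.add_argument("-c", "--cpus", type=int,
                    default=multiprocessing.cpu_count(),
                    help="number of worker processes")
parser.add_argument("-v", "--verbosity", type=int, default=1)
parser.add_argument("-u", "--update", action="store_true",
                    help="update test baselines instead of comparing")
parser.add_argument("-t", "--traceback", action="store_true",
                    help="show full tracebacks on failure")
parser.add_argument("-s", "--subdir", action="store_true",
                    help="run the tests from a temporary directory")
args = parser.parse_args()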
Example No. 4
import cdms2
import cdat_info
import os

cdat_info.download_sample_data_files("data/sample_files.txt")

f = cdms2.open(
    os.path.join(cdat_info.get_sampledata_path(), "ta_ncep_87-6-88-4.nc"))

ta = f("ta", level=slice(0, 6))
ta.getTime().units = "days since 1949-1-1"
ta2 = f("ta", level=slice(6, None))
ta2.shape
ta2.getTime().units = "days since 1949-1-1"
ta1 = cdms2.open("ta1.nc", "w")
cdms2.setNetcdfDeflateLevelFlag(0)
cdms2.setNetcdfDeflateFlag(0)
cdms2.setNetcdfShuffleFlag(0)
ta1 = cdms2.open("ta1.nc", "w")
ta1.write(ta[:4], id='ta')
ta1.close()
ta1 = cdms2.open("ta1b.nc", "w")
ta1.write(ta[4:], id='ta')
ta1.close()
ta1 = cdms2.open("ta2.nc", "w")
ta1.write(ta2[:4], id='ta')
ta1.close()
ta1 = cdms2.open("ta2b.nc", "w")
ta1.write(ta2[4:], id='ta')
ta1.close()
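As a quick follow-up (not part of the original snippet), the four files written above can be reopened to confirm the shapes and the rewritten time units; only calls already used in the example are needed.

import cdms2

# Reopen each output file and print its shape and time units.
for name in ("ta1.nc", "ta1b.nc", "ta2.nc", "ta2b.nc"):
    g = cdms2.open(name)
    var = g("ta")
    print(name, var.shape, var.getTime().units)
    g.close()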
Example No. 5
parser.add_argument(
    "--version_in_path",
    action="store_true",
    default=False,
    help="Append version in root path, avoids clobbering versions",
)
parser.add_argument("--output-path", help="directory where to download", default=None)
# parser.use("num_workers")
p = parser.get_parameter()

# Step 1: prepare the paths to get the sample data files
pth = tempfile.mkdtemp()
files = []
if p.dataset in ["all", "obs"]:  # ok we need obs
    download_file(p.server, "obs_{}.txt".format(p.version), "obs.txt")
    files.append("obs.txt")
if p.dataset in ["all", "sample"]:
    download_file(p.server, "sample_{}.txt".format(p.version), "sample.txt")
    files.append("sample.txt")

# Ok now we can download
for file in files:
    # First do we clobber or not?
    pathout = p.output_path
    if p.version_in_path:
        with open(file) as f:
            header = f.readline().strip()
            version = header.split("_")[-1]
            pathout = os.path.join(p.output_path, version)
    cdat_info.download_sample_data_files(file, path=pathout)
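The download_file helper called above is not included in the example. A hedged sketch that matches only its call signature (server, remote file name, local file name), using requests, could look like this:

import requests


def download_file(server, remote_name, local_name):
    # Hedged sketch: fetch <server>/<remote_name> and save it as local_name.
    r = requests.get("{}/{}".format(server, remote_name))
    r.raise_for_status()
    with open(local_name, "wb") as out:
        out.write(r.content)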
Example No. 6
    for name in filenames:
        with open(name) as template_file:
            print("Preparing parameter file: {}".format(name[:-3]))
            template = template_file.read()
            for key in sub_dict:
                template = template.replace("${}$".format(key), sub_dict[key])
            with open(name[:-3], "w") as param_file:
                param_file.write(template)

    print("Saving User Choices")
    with open("user_choices.py", "w") as f:
        print("demo_data_directory = '{}'".format(demo_data_directory), file=f)
        print("demo_output_directory = '{}'".format(demo_output_directory),
              file=f)


if __name__ == "__main__":
    """Perform the same actions as Demo 0 notebook: Get the tutorial file list,
    download the sample data, and generate the parameter files."""
    import requests
    import cdat_info

    r = requests.get(
        "https://pcmdiweb.llnl.gov/pss/pmpdata/pmp_tutorial_files.txt")
    with open("data_files.txt", "wb") as f:
        f.write(r.content)

    demo_data_directory = "demo_data"
    demo_output_directory = "demo_output"
    cdat_info.download_sample_data_files("data_files.txt", demo_data_directory)
    generate_parameter_files(demo_data_directory, demo_output_directory)
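A small, self-contained illustration of the "$key$" placeholder substitution performed by generate_parameter_files above, with hypothetical keys and template text:

# Hypothetical keys and template line, only to show the replace() loop above.
sub_dict = {
    "demo_data_directory": "demo_data",
    "demo_output_directory": "demo_output",
}
template = "results_dir = '$demo_output_directory$/mean_climate'"
for key in sub_dict:
    template = template.replace("${}$".format(key), sub_dict[key])
print(template)  # results_dir = 'demo_output/mean_climate'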
Example No. 7
parser.add_argument("--version", help="which version to use", default="latest")
parser.add_argument("--server", help="which server to use",
                    default="https://pcmdiweb.llnl.gov/pss/pmpdata")
parser.add_argument("--version_in_path", action="store_true", default=False,
                    help="Append version in root path, avoids clobbering versions")
parser.add_argument(
    "--output-path", help="directory where to download", default=None)
# parser.use("num_workers")
p = parser.get_parameter()

# Step 1: prepare the paths to get the sample data files
pth = tempfile.mkdtemp()
files = []
if p.dataset in ["all", "obs"]:  # ok we need obs
    download_file(p.server, "obs_{}.txt".format(p.version), "obs.txt")
    files.append("obs.txt")
if p.dataset in ["all", "sample"]:
    download_file(p.server, "sample_{}.txt".format(p.version), "sample.txt")
    files.append("sample.txt")

# Ok now we can download
for file in files:
    # First do we clobber or not?
    pathout = p.output_path
    if p.version_in_path:
        with open(file) as f:
            header = f.readline().strip()
            version = header.split("_")[-1]
            pathout = os.path.join(p.output_path, version)
    cdat_info.download_sample_data_files(file, path=pathout)
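A tiny worked example of the version parsing in the loop above, with a made-up header line and output path:

import os

header = "pmp_obs_v1.2"                       # hypothetical first line of obs.txt
version = header.split("_")[-1]               # -> "v1.2"
pathout = os.path.join("downloads", version)  # hypothetical output path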