Example No. 1
import os

from archetypal import get_eplus_dirs, load_idf, settings


def converttest(request):
    file = get_eplus_dirs(settings.ep_version) / "ExampleFiles" / request.param
    window_file = "W74-lib.dat"
    template_dir = os.path.join("archetypal", "ressources")
    window_filepath = os.path.join(template_dir, window_file)
    template_d18 = "tests/input_data/trnsys/NewFileTemplate.d18"
    trnsidf_exe = "docker/trnsidf/trnsidf.exe"

    # keyword arguments forwarded to the conversion routine: target window
    # properties, the matching tolerance, and an ordering flag
    kwargs_dict = {
        "u_value": 2.5,
        "shgc": 0.6,
        "t_vis": 0.78,
        "tolerance": 0.05,
        "ordered": True,
    }
    idf = load_idf(file)

    weather_file = os.path.join(
        "tests", "input_data", "CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw"
    )

    output_folder = os.path.relpath(settings.data_folder)

    yield idf, file, weather_file, window_filepath, trnsidf_exe, template_d18, output_folder, kwargs_dict

    del idf
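A note on the pattern: everything before the yield is setup, the yielded tuple is handed to the consuming test, and the del idf afterwards runs as teardown. Below is a minimal, self-contained sketch of the same parametrized-fixture pattern; the fixture name, parameter list and test are hypothetical, not part of archetypal.

import pytest


@pytest.fixture(params=["5ZoneNightVent1.idf"])  # hypothetical parameter list
def example_name(request):
    # setup would run here, before the yield
    yield request.param
    # teardown runs here, after the consuming test finishes


def test_uses_fixture(example_name):
    # the fixture's yielded value arrives as the test argument
    assert example_name.endswith(".idf")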
Example No. 2
def test_wwr():
    from path import Path

    from archetypal import load_idf

    idf_file = Path("tests/input_data/necb/").glob(
        "*{}*.idf".format("FullServiceRestaurant"))
    idf = load_idf(next(iter(idf_file)))
    print(idf.name)
    print(idf.wwr(round_to=10))
Example No. 3
def test_run_eplus_from_idf(clean_config):
    import archetypal as ar
    from archetypal import get_eplus_dirs, settings

    file = (get_eplus_dirs(settings.ep_version) / "ExampleFiles" /
            "5ZoneNightVent1.idf")
    wf = "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw"

    idf = ar.load_idf(file, weather_file=wf)
    sql = idf.run_eplus(prep_outputs=True, output_report="sql")

    assert sql
Example No. 4
def test_space_heating_profile(config):
    from archetypal import load_idf

    file = "tests/input_data/necb/NECB 2011-Warehouse-NECB HDD Method-CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw.idf"
    wf = "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw"

    idf = load_idf(file, None, weather_file=wf)

    assert not idf.space_heating_profile().empty
Example No. 5
def schedules_idf():
    from archetypal import config, get_eplus_dirs, load_idf, settings

    config(cache_folder="tests/.temp/cache")
    # idf_file is defined at module scope in the original test file
    idf = load_idf(
        idf_file,
        include=[
            get_eplus_dirs(settings.ep_version) / "DataSets" / "TDV" /
            "TDV_2008_kBtu_CTZ06.csv"
        ],
    )
    return idf
Example No. 6
def test_space_cooling_profile(config):
    from archetypal import get_eplus_dirs, load_idf, settings

    file = (get_eplus_dirs(settings.ep_version) / "ExampleFiles" /
            "BasicsFiles" / "AdultEducationCenter.idf")
    wf = "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw"

    idf = load_idf(file, None, weather_file=wf)

    assert not idf.space_cooling_profile().empty
Example No. 7
def test_load_old(config):
    import os

    import archetypal as ar
    from archetypal import get_eplus_dirs, settings

    files = [
        "tests/input_data/problematic/nat_ventilation_SAMPLE0.idf",
        get_eplus_dirs(settings.ep_version) / "ExampleFiles" /
        "5ZoneNightVent1.idf",
    ]

    obj = {os.path.basename(file): ar.load_idf(file) for file in files}

    assert not any(isinstance(a, Exception) for a in obj.values())
Example No. 8
def test_load_idf_asdict(as_dict, processors, fresh_start):
    """Loads the same IDF file twice, as a dict or a list depending on as_dict."""
    import archetypal as ar

    file1 = './input_data/regular/AdultEducationCenter.idf'
    file2 = './input_data/regular/AdultEducationCenter.idf'
    obj = ar.load_idf([file1, file2], as_dict=as_dict, processors=processors)
    if as_dict:
        assert isinstance(obj, dict)
    else:
        assert isinstance(obj, list)
Example No. 9
def test_dhw_profile(config):
    from archetypal import load_idf

    file = "tests/input_data/necb/NECB 2011-Warehouse-NECB HDD Method-CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw.idf"
    wf = "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw"

    idf = load_idf(file, None, weather_file=wf)

    shw = idf.service_water_heating_profile()
    assert shw.sum() > 0
    print(shw.resample("M").sum())
Example No. 10
def test_area(archetype, area):
    """Test the conditioned_area property against published values
    desired values taken from https://github.com/canmet-energy/btap"""
    import numpy as np
    from archetypal import load_idf

    idf_file = Path("tests/input_data/necb/").glob(
        "*{}*.idf".format(archetype))
    idf = load_idf(next(iter(idf_file)))
    np.testing.assert_almost_equal(actual=idf.area_conditioned,
                                   desired=area,
                                   decimal=0)
Example No. 11
def test_load_idf(config):
    """Loads two example files into a dict keyed by file basename."""
    import os

    import archetypal as ar
    from archetypal import get_eplus_dirs, settings

    files = [
        get_eplus_dirs(settings.ep_version) / "ExampleFiles" /
        "5ZoneNightVent1.idf",
        get_eplus_dirs(settings.ep_version) / "ExampleFiles" / "BasicsFiles" /
        "AdultEducationCenter.idf",
    ]

    obj = {os.path.basename(file): ar.load_idf(file) for file in files}
    assert isinstance(obj, dict)
Example No. 12
def test_schedules_in_necb_specific(config):
    import os

    from archetypal import Schedule, load_idf

    files = [
        "tests/input_data/necb/NECB 2011-MediumOffice-NECB HDD Method-CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw.idf"
    ]
    idfs = {os.path.basename(file): load_idf(file) for file in files}
    import matplotlib.pyplot as plt

    for key in idfs:
        idf = idfs[key]
        s = Schedule(Name="NECB-A-Thermostat Setpoint-Heating",
                     idf=idf,
                     start_day_of_the_week=0)
        s.plot(slice=("2018/01/02", "2018/01/03"), drawstyle="steps-post")
        plt.show()
Example No. 13
def test_download_and_load_bld_window(clean_config):
    """Download a window matching the requested properties and load its IDF file."""
    import os

    import archetypal as ar
    from archetypal import download_bld_window, settings

    oauth_consumer_key = os.environ.get("NREL_CONSUMER_KEY")

    response = download_bld_window(
        u_factor=3.18,
        shgc=0.49,
        vis_trans=0.53,
        oauth_key=oauth_consumer_key,
        tolerance=0.05,
    )
    idf = ar.load_idf(response[0], ep_version=settings.ep_version)
    construct = idf.getobject("CONSTRUCTION", "AEDG-SmOffice 1A Window Fixed")
    ws = ar.WindowSetting.from_construction(Name="test_window",
                                            Construction=construct)

    assert ws
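The test reads its OAuth key from the NREL_CONSUMER_KEY environment variable, so the key must be provided before the test runs. A minimal sketch of setting it from Python; the value is a placeholder, not a working credential.

import os

# a real NREL developer API key must be supplied here
os.environ.setdefault("NREL_CONSUMER_KEY", "your-nrel-key")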
Example No. 14
    def test_reduce_failed(self, clean_config):
        """Tests the 'reduce' method on a failed file"""
        runner = CliRunner()
        test_file = "tests/input_data/necb/NECB 2011-Warehouse-NECB HDD Method-CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw.idf"

        # First, modify the file so that it breaks, by removing the BUILDING object.
        idf = load_idf(test_file)
        bldg = idf.idfobjects["BUILDING"][0]
        idf.removeidfobject(bldg)
        idf.save()

        result = runner.invoke(
            cli,
            [
                "--use-cache",
                "--cache-folder",
                "tests/.temp/cache",
                "--data-folder",
                "tests/.temp/data",
                "--imgs-folder",
                "tests/.temp/images",
                "--logs-folder",
                "tests/.temp/logs",
                "--ep_version",
                settings.ep_version,
                "reduce",
                "-w",
                "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw",
                "-p",
                *[idf.idfname, idf.idfname],
                "tests/.temp/retail.json",
            ],
            catch_exceptions=False,
        )
        print(result.stdout)
        # check an error file has been created
        assert Path("failed_reduce.txt").exists()
        assert result.exit_code == 0
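Note that failed_reduce.txt is written by the reduce command itself when a run raises EnergyPlusProcessError (see Example No. 16), which is also why the command still exits with code 0: failures are recorded in the file rather than propagated.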
Example No. 15
def test_make_umi_schedule(config):
    """Tests only a single schedule name"""
    import matplotlib.pyplot as plt

    from archetypal import UmiSchedule, load_idf

    idf_file = "tests/input_data/schedules/schedules.idf"
    idf_file = copy_file(idf_file)
    idf = load_idf(idf_file)

    s = UmiSchedule(Name="POFF", idf=idf, start_day_of_the_week=0)
    ep_year, ep_weeks, ep_days = s.to_year_week_day()

    new = UmiSchedule(Name=ep_year.Name,
                      idf=idf,
                      start_day_of_the_week=s.startDayOfTheWeek)

    print(len(s.all_values))
    print(len(new.all_values))
    ax = s.plot(slice=("2018/01/01 00:00", "2018/01/07"), legend=True)
    new.plot(slice=("2018/01/01 00:00", "2018/01/07"), ax=ax, legend=True)
    plt.show()
    assert s.__class__.__name__ == "UmiSchedule"
    assert len(s.all_values) == len(new.all_values)
    assert (new.all_values == s.all_values).all()
Example No. 16
def reduce(idf, output, weather, parallel, all_zones):
    """Perform the model reduction and translate to an UMI template file.

    IDF is one or more IDF files to process.
    OUTPUT is the output file name (or path) to write to. Optional.
    """
    # This Click command relies on names imported elsewhere in the original
    # module: Path (path.py), defaultdict, tabulate, and archetypal helpers
    # such as run_eplus, parallel_process, load_idf, log, IDF, UmiTemplate,
    # EnergyPlusProcessError and _write_invalid.
    if parallel:
        # if parallel is True, run eplus in parallel
        rundict = {
            file: dict(
                eplus_file=file,
                weather_file=weather,
                annual=True,
                prep_outputs=True,
                expandobjects=True,
                verbose="v",
                output_report="sql",
                return_idf=False,
                ep_version=settings.ep_version,
            )
            for file in idf
        }
        res = parallel_process(rundict, run_eplus)
        res = _write_invalid(res)

        loaded_idf = {}
        for key, sql in res.items():
            loaded_idf[key] = {}
            loaded_idf[key][0] = sql
            loaded_idf[key][1] = load_idf(key)
        res = loaded_idf
    else:
        # else, run sequentially
        res = defaultdict(dict)
        invalid = []
        for i, fn in enumerate(idf):
            try:
                res[fn][0], res[fn][1] = run_eplus(
                    fn,
                    weather,
                    ep_version=settings.ep_version,
                    output_report="sql",
                    prep_outputs=True,
                    annual=True,
                    design_day=False,
                    verbose="v",
                    return_idf=True,
                )
            except EnergyPlusProcessError as e:
                invalid.append({"#": i, "Filename": fn.basename(), "Error": e})
        if invalid:
            filename = Path("failed_reduce.txt")
            with open(filename, "w") as failures:
                failures.writelines(tabulate(invalid, headers="keys"))
                log('Invalid run listed in "%s"' % filename)

    from archetypal import BuildingTemplate

    bts = []
    for fn in res.values():
        # each entry holds the sql results (a dict) and the loaded IDF object
        sql = next(value for value in fn.values() if isinstance(value, dict))
        idf = next(value for value in fn.values() if isinstance(value, IDF))
        bts.append(BuildingTemplate.from_idf(idf, sql=sql,
                                             DataSource=idf.name))

    output = Path(output)
    name = output.namebase
    ext = ".json"  # the output is always written as a .json template
    dir_ = output.dirname()
    template = UmiTemplate(name=name, BuildingTemplates=bts)
    final_path: Path = dir_ / name + ext
    template.to_json(path_or_buf=final_path, all_zones=all_zones)
    log("Successfully created template file at {}".format(
        final_path.abspath()))
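Since reduce is a Click command (Example No. 14 drives it through CliRunner), an equivalent programmatic invocation would look roughly like the sketch below. The import path archetypal.cli and all file paths are assumptions used for illustration.

from click.testing import CliRunner

from archetypal.cli import cli  # assumed module path for the Click group

runner = CliRunner()
result = runner.invoke(
    cli,
    # -w is the weather file; the positional arguments are the IDF file(s)
    # followed by the output template path (all paths are placeholders)
    ["reduce", "-w", "weather.epw", "model.idf", "out.json"],
)
print(result.output)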
Example No. 17
    def read_idf(cls,
                 idf_files,
                 weather,
                 sql=None,
                 name="unnamed",
                 load_idf_kwargs=None):
        """Initializes an UmiTemplate object from one or more idf_files.

        The resulting object contains the reduced version of the IDF files.
        To save to file, call the :meth:`to_json` method.

        Args:
            idf_files (str or list): One or more IDF file paths.
            weather (str): Path to the weather file.
            sql: Optional SQL results to attach to the template.
            name: Name of the resulting template (default "unnamed").
            load_idf_kwargs (dict): kwargs passed to the
                :meth:`archetypal.idfclass.load_idf` method.
        """
        if load_idf_kwargs is None:
            load_idf_kwargs = {}
        # instantiate class
        t = cls(name)

        # fill in arguments
        t.idf_files = idf_files
        t.weather = weather
        t.sql = sql

        # Load IDF objects
        t.idfs = [
            load_idf(idf_file, weather_file=weather, **load_idf_kwargs)
            for idf_file in idf_files
        ]

        # For each idf load
        template_obj = []
        for idf in t.idfs:
            bldg = BuildingTemplate.from_idf(idf,
                                             sql=idf.sql,
                                             DataSource=idf.name)
            template_obj.append(bldg)
            # collect each supported component type into the matching
            # plural attribute (t.DaySchedules, t.ZoneLoads, ...)
            for obj_class in [
                    DaySchedule,
                    DomesticHotWaterSetting,
                    GasMaterial,
                    GlazingMaterial,
                    OpaqueConstruction,
                    OpaqueMaterial,
                    StructureDefinition,
                    VentilationSetting,
                    WeekSchedule,
                    WindowConstruction,
                    YearSchedule,
                    ZoneConditioning,
                    ZoneConstructionSet,
                    ZoneLoad,
                    Zone,
            ]:
                t.__dict__[obj_class.__name__ + "s"].extend([
                    obj for obj in bldg.all_objects.values()
                    if isinstance(obj, obj_class)
                ])

        t.BuildingTemplates = template_obj

        return t
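A usage sketch following the docstring above; the file paths are placeholders and UmiTemplate is assumed to be importable from the package root.

from archetypal import UmiTemplate

# reduce two IDF files into a single template and write it to disk
t = UmiTemplate.read_idf(
    ["office.idf", "warehouse.idf"],
    weather="CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw",
    name="my_template",
)
t.to_json(path_or_buf="my_template.json")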
Example No. 18
def test_partition_ratio():
    from path import Path

    from archetypal import load_idf

    idf_file = Path("tests/input_data/necb/").glob("*LargeOffice*.idf")
    idf = load_idf(next(iter(idf_file)))
    print(idf.partition_ratio)