コード例 #1
0
ファイル: test_location.py プロジェクト: MLEEFS/pvlib-python
def test_get_airmass_valueerror():
    """get_airmass must raise ValueError for an unrecognized model name."""
    tus = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    # pd.DatetimeIndex(start=..., end=..., freq=...) was deprecated in
    # pandas 0.24 and removed in 0.25; pd.date_range builds the same index.
    times = pd.date_range(start='20160101T0600-0700',
                          end='20160101T1800-0700',
                          freq='3H')
    with pytest.raises(ValueError):
        tus.get_airmass(times, model='invalid_model')
コード例 #2
0
ファイル: test_location.py プロジェクト: MLEEFS/pvlib-python
def test_get_airmass():
    """get_airmass returns relative/absolute airmass; NaN outside daylight.

    Checks the default model and an explicit alternative ('young1994').
    """
    tus = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    # pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
    # pandas 0.25; pd.date_range builds the identical index.
    times = pd.date_range(start='20160101T0600-0700',
                          end='20160101T1800-0700',
                          freq='3H')
    airmass = tus.get_airmass(times)
    expected = pd.DataFrame(data=np.array(
                            [[     np.nan,      np.nan],
                             [ 3.61046506,  3.32072602],
                             [ 1.76470864,  1.62309115],
                             [ 2.45582153,  2.25874238],
                             [     np.nan,      np.nan]]),
                            columns=['airmass_relative', 'airmass_absolute'],
                            index=times)
    assert_frame_equal(expected, airmass)

    airmass = tus.get_airmass(times, model='young1994')
    expected = pd.DataFrame(data=np.array(
                            [[     np.nan,      np.nan],
                             [ 3.6075018 ,  3.31800056],
                             [ 1.7641033 ,  1.62253439],
                             [ 2.45413091,  2.25718744],
                             [     np.nan,      np.nan]]),
                            columns=['airmass_relative', 'airmass_absolute'],
                            index=times)
    assert_frame_equal(expected, airmass)
コード例 #3
0
ファイル: test_tracking.py プロジェクト: MLEEFS/pvlib-python
def test_get_irradiance():
    """SingleAxisTracker.get_irradiance yields POA components; NaN at night."""
    system = tracking.SingleAxisTracker(max_angle=90, axis_tilt=30,
                                        axis_azimuth=180, gcr=2.0/7.0,
                                        backtrack=True)
    # pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
    # pandas 0.25; pd.date_range builds the identical index.
    times = pd.date_range(start='20160101 1200-0700',
                          end='20160101 1800-0700', freq='6H')
    location = Location(latitude=32, longitude=-111)
    solar_position = location.get_solarposition(times)
    irrads = pd.DataFrame({'dni': [900, 0], 'ghi': [600, 0], 'dhi': [100, 0]},
                          index=times)
    solar_zenith = solar_position['apparent_zenith']
    solar_azimuth = solar_position['azimuth']
    tracker_data = system.singleaxis(solar_zenith, solar_azimuth)

    irradiance = system.get_irradiance(irrads['dni'],
                                       irrads['ghi'],
                                       irrads['dhi'],
                                       solar_zenith=solar_zenith,
                                       solar_azimuth=solar_azimuth,
                                       surface_tilt=tracker_data['surface_tilt'],
                                       surface_azimuth=tracker_data['surface_azimuth'])

    # second timestamp is after sunset, so all outputs are NaN there
    expected = pd.DataFrame(data=np.array(
        [[ 961.80070,   815.94490,   145.85580,   135.32820,
          10.52757492],
       [       np.nan,        np.nan,        np.nan,        np.nan,
               np.nan]]),
                            columns=['poa_global', 'poa_direct',
                                     'poa_diffuse', 'poa_sky_diffuse',
                                     'poa_ground_diffuse'],
                            index=times)

    assert_frame_equal(irradiance, expected, check_less_precise=2)
コード例 #4
0
def test_PVSystem_get_irradiance():
    """PVSystem.get_irradiance yields POA components; zeros after sunset."""
    system = pvsystem.PVSystem(surface_tilt=32, surface_azimuth=135)
    # pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
    # pandas 0.25; pd.date_range builds the identical index.
    times = pd.date_range(start='20160101 1200-0700',
                          end='20160101 1800-0700', freq='6H')
    location = Location(latitude=32, longitude=-111)
    solar_position = location.get_solarposition(times)
    irrads = pd.DataFrame({'dni': [900, 0], 'ghi': [600, 0], 'dhi': [100, 0]},
                          index=times)

    irradiance = system.get_irradiance(solar_position['apparent_zenith'],
                                       solar_position['azimuth'],
                                       irrads['dni'],
                                       irrads['ghi'],
                                       irrads['dhi'])

    expected = pd.DataFrame(data=np.array(
        [[ 883.65494055,  745.86141676,  137.79352379,  126.397131  ,
              11.39639279],
           [   0.        ,   -0.        ,    0.        ,    0.        ,    0.        ]]),
                            columns=['poa_global', 'poa_direct',
                                     'poa_diffuse', 'poa_sky_diffuse',
                                     'poa_ground_diffuse'],
                            index=times)

    # round both sides so tiny floating-point drift doesn't fail the test
    irradiance = np.round(irradiance, 4)
    expected = np.round(expected, 4)
    assert_frame_equal(irradiance, expected)
コード例 #5
0
def test_location_print_pytz():
    """A Location built with a pytz timezone object stringifies normally."""
    loc = Location(32.2, -111, pytz.timezone('US/Arizona'), 700, 'Tucson')
    lines = [
        'Location: ',
        '  name: Tucson',
        '  latitude: 32.2',
        '  longitude: -111',
        '  altitude: 700',
        '  tz: US/Arizona',
    ]
    assert str(loc) == '\n'.join(lines)
コード例 #6
0
def test_get_clearsky_haurwitz(times):
    """The Haurwitz clear-sky model returns a single 'ghi' column."""
    loc = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    out = loc.get_clearsky(times, model='haurwitz')
    ghi = [0., 242.30085588, 559.38247117, 384.6873791, 0.]
    expected = pd.DataFrame({'ghi': ghi}, index=times)
    assert_frame_equal(expected, out)
コード例 #7
0
def test_get_clearsky(mocker, times):
    """Default get_clearsky dispatches to clearsky.ineichen exactly once."""
    loc = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    spy = mocker.spy(pvlib.clearsky, 'ineichen')
    cs = loc.get_clearsky(times)
    assert spy.call_count == 1
    assert_index_equal(cs.index, times)
    # zero irradiance before sunrise and after sunset ...
    assert cs.iloc[0, :].sum().sum() == 0
    assert cs.iloc[-1:, :].sum().sum() == 0
    # ... and strictly positive irradiance during the day
    assert (cs.iloc[1:-1, :] > 0).all().all()
    assert (cs.columns.values == ['ghi', 'dni', 'dhi']).all()
コード例 #8
0
def test_Location___repr__():
    """repr(Location) lists name, coordinates, altitude and timezone."""
    loc = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    lines = [
        'Location: ',
        '  name: Tucson',
        '  latitude: 32.2',
        '  longitude: -111',
        '  altitude: 700',
        '  tz: US/Arizona',
    ]
    assert repr(loc) == '\n'.join(lines)
コード例 #9
0
def test_get_clearsky_simplified_solis(times):
    """The simplified Solis model returns ghi/dni/dhi with expected values."""
    loc = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    out = loc.get_clearsky(times, model='simplified_solis')
    # expected columns built directly in the model's output order
    expected = pd.DataFrame(
        {'ghi': [0., 236.71136245, 577.1117803, 385.59586091, 0.],
         'dni': [0., 638.01145669, 852.51950946, 755.98048017, 0.],
         'dhi': [0., 70.00146271, 101.69729217, 86.1679965, 0.]},
        index=times)
    assert_frame_equal(expected, out, check_less_precise=2)
コード例 #10
0
def test_get_clearsky_simplified_solis_pressure(times):
    """Supplying a custom pressure changes the simplified Solis output."""
    loc = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    out = loc.get_clearsky(times, model='simplified_solis',
                           pressure=95000)
    # expected columns built directly in the model's output order
    expected = pd.DataFrame(
        {'ghi': [0., 236.17716435, 576.28465815, 384.90537859, 0.],
         'dni': [0., 635.53091983, 850.49502085, 753.70744638, 0.],
         'dhi': [0., 70.20556637, 102.08954904, 86.46561686, 0.]},
        index=times)
    assert_frame_equal(expected, out, check_less_precise=2)
コード例 #11
0
def test_get_clearsky_simplified_solis_dni_extra(times):
    """Supplying dni_extra changes the simplified Solis output."""
    loc = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    out = loc.get_clearsky(times, model='simplified_solis',
                           dni_extra=1370)
    # expected columns built directly in the model's output order
    expected = pd.DataFrame(
        {'ghi': [0., 229.34422063, 559.15039353, 373.59500313, 0.],
         'dni': [0., 618.15469596, 825.98663808, 732.45218243, 0.],
         'dhi': [0., 67.82281485, 98.53217848, 83.48619937, 0.]},
        index=times)
    assert_frame_equal(expected, out)
コード例 #12
0
ファイル: test_location.py プロジェクト: MLEEFS/pvlib-python
def test_get_clearsky():
    """Default get_clearsky (Ineichen) returns expected ghi/dni/dhi."""
    tus = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    # pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
    # pandas 0.25; pd.date_range builds the identical index.
    times = pd.date_range(start='20160101T0600-0700',
                          end='20160101T1800-0700',
                          freq='3H')
    clearsky = tus.get_clearsky(times)
    # structured dtype gives the DataFrame its column names and order
    expected = pd.DataFrame(data=np.array([
        (  0.0,                0.0,              0.0),
        (262.77734276159333, 791.1972825869296, 46.18714900637892),
        (616.764693938387,   974.9610353623959, 65.44157429054201),
        (419.6512657626518,  901.6234995035793, 54.26016437839348),
        (  0.0,                0.0,              0.0)],
        dtype=[('ghi', '<f8'), ('dni', '<f8'), ('dhi', '<f8')]), index=times)
    assert_frame_equal(expected, clearsky, check_less_precise=2)
コード例 #13
0
def test_get_clearsky_simplified_solis_aod_pw(times):
    """Supplying aod700 and precipitable_water changes the Solis output."""
    loc = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    out = loc.get_clearsky(times, model='simplified_solis',
                           aod700=0.25, precipitable_water=2.)
    # expected columns built directly in the model's output order
    expected = pd.DataFrame(
        {'ghi': [0., 179.48483117, 490.06254157, 312.24711495, 0.],
         'dni': [0., 374.58084365, 625.91745295, 506.52275195, 0.],
         'dhi': [0., 85.77821205, 143.52743364, 114.63275842, 0.]},
        index=times)
    assert_frame_equal(expected, out, check_less_precise=2)
コード例 #14
0
def test_get_clearsky_ineichen_supply_linke(mocker):
    """An explicit linke_turbidity must be forwarded to clearsky.ineichen."""
    loc = Location(32.2, -111, 'US/Arizona', 700)
    times = pd.date_range(start='2014-06-24-0700', end='2014-06-25-0700',
                          freq='3h')
    mocker.spy(pvlib.clearsky, 'ineichen')
    cs = loc.get_clearsky(times, linke_turbidity=3)
    # only the forwarded LT value matters here; other args are ANY
    pvlib.clearsky.ineichen.assert_called_once_with(ANY, ANY, 3, ANY, ANY)
    assert_index_equal(cs.index, times)
    # zero irradiance before sunrise and after sunset ...
    assert cs.iloc[0:2, :].sum().sum() == 0
    assert cs.iloc[-2:, :].sum().sum() == 0
    # ... and strictly positive irradiance during the day
    assert (cs.iloc[2:-2, :] > 0).all().all()
    assert (cs.columns.values == ['ghi', 'dni', 'dhi']).all()
コード例 #15
0
ファイル: test_location.py プロジェクト: caskeep/pvlib-python
def test_get_clearsky():
    """Default get_clearsky (Ineichen) returns expected ghi/dni/dhi."""
    tus = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    # pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
    # pandas 0.25; pd.date_range builds the identical index.
    times = pd.date_range(start='20160101T0600-0700',
                          end='20160101T1800-0700',
                          freq='3H')
    clearsky = tus.get_clearsky(times)
    expected = pd.DataFrame(data=np.
        array([[   0.        ,    0.        ,    0.        ],
               [ 258.60422702,  761.57329257,   50.1235982 ],
               [ 611.96347869,  956.95353414,   70.8232806 ],
               [ 415.10904044,  878.52649603,   59.07820922],
               [   0.        ,    0.        ,    0.        ]]),
                            columns=['ghi', 'dni', 'dhi'],
                            index=times)
    assert_frame_equal(expected, clearsky, check_less_precise=2)
コード例 #16
0
ファイル: test_location.py プロジェクト: MLEEFS/pvlib-python
def test_get_clearsky_haurwitz():
    """The Haurwitz clear-sky model returns a single 'ghi' column."""
    tus = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    # pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
    # pandas 0.25; pd.date_range builds the identical index.
    times = pd.date_range(start='20160101T0600-0700',
                          end='20160101T1800-0700',
                          freq='3H')
    clearsky = tus.get_clearsky(times, model='haurwitz')
    expected = pd.DataFrame(data=np.array(
                            [[   0.        ],
                             [ 242.30085588],
                             [ 559.38247117],
                             [ 384.6873791 ],
                             [   0.        ]]),
                            columns=['ghi'],
                            index=times)
    assert_frame_equal(expected, clearsky)
コード例 #17
0
def test_get_clearsky_simplified_solis_apparent_elevation(times):
    """A user-supplied solar_position dict overrides the computed one."""
    loc = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    # constant elevation/zenith -> every row of the output is identical
    solar_position = {'apparent_elevation': pd.Series(80, index=times),
                      'apparent_zenith': pd.Series(10, index=times)}
    out = loc.get_clearsky(times, model='simplified_solis',
                           solar_position=solar_position)
    n = len(times)
    expected = pd.DataFrame(
        {'ghi': [1108.14147919] * n,
         'dni': [1001.14754036] * n,
         'dhi': [131.3124497] * n},
        index=times)
    assert_frame_equal(expected, out, check_less_precise=2)
コード例 #18
0
def test_from_tmy_2():
    """Location.from_tmy populates metadata and data from a TMY2 file."""
    from pvlib.iotools import read_tmy2
    from test_tmy import tmy2_testfile

    data, meta = read_tmy2(tmy2_testfile)
    loc = Location.from_tmy(meta, data)
    assert loc.name is not None
    assert loc.altitude != 0
    assert loc.tz != 'UTC'
    assert_frame_equal(loc.tmy_data, data)
コード例 #19
0
ファイル: test_location.py プロジェクト: MLEEFS/pvlib-python
def test_get_clearsky_ineichen_supply_linke():
    """Ineichen clear sky with a fixed Linke turbidity matches references."""
    loc = Location(32.2, -111, 'US/Arizona', 700)
    times = pd.date_range(start='2014-06-24', end='2014-06-25',
                          freq='3h').tz_localize('US/Arizona')
    # expected columns laid out per output variable instead of per row
    expected = pd.DataFrame(
        {'ghi': [0., 0., 79.73090244, 703.43653498, 1042.37962396,
                 851.32411813, 257.18266827, 0., 0.],
         'dni': [0., 0., 316.16436502, 876.41452667, 939.86391062,
                 909.11186737, 646.16644264, 0., 0.],
         'dhi': [0., 0., 40.45759009, 95.15798252, 118.44687715,
                 105.36662462, 62.02777094, 0., 0.]},
        index=times)
    out = loc.get_clearsky(times, linke_turbidity=3)
    assert_frame_equal(expected, out, check_less_precise=2)
コード例 #20
0
ファイル: forecast.py プロジェクト: dpete2008/Sandia
    def set_location(self, time, latitude, longitude):
        """Set ``self.location`` for the query.

        Parameters
        ----------
        time : datetime or DatetimeIndex
            Time range of the query; its timezone (if any) is applied
            to the Location.
        latitude : float
            Latitude of the query location.
        longitude : float
            Longitude of the query location.
        """
        # datetime carries tz info on .tzinfo, DatetimeIndex on .tz
        if isinstance(time, datetime.datetime):
            tzinfo = time.tzinfo
        else:
            tzinfo = time.tz

        if tzinfo is None:
            # let Location fall back to its default timezone
            self.location = Location(latitude, longitude)
        else:
            self.location = Location(latitude, longitude, tz=tzinfo)
コード例 #21
0
def detect_clearsky_data():
    """Load the detect_clearsky CSV fixture and its modeled clear sky.

    Returns the expected data (tz-converted to Etc/GMT+7) and the
    Ineichen clear sky computed over the same index.
    """
    here = os.path.dirname(os.path.abspath(
        inspect.getfile(inspect.currentframe())))
    data_file = os.path.join(here, '..', 'data', 'detect_clearsky_data.csv')
    expected = pd.read_csv(data_file, index_col=0, parse_dates=True,
                           comment='#')
    expected = expected.tz_localize('UTC').tz_convert('Etc/GMT+7')
    # metadata lives in '#'-prefixed "key: value" lines at the top of the file
    metadata = {}
    with open(data_file) as fh:
        for line in fh:
            if not line.startswith('#'):
                break
            key, value = line.strip('# \n').split(':')
            metadata[key] = float(value)
    metadata['window_length'] = int(metadata['window_length'])
    loc = Location(metadata['latitude'], metadata['longitude'],
                   altitude=metadata['elevation'])
    # specify turbidity to guard against future lookup changes
    cs = loc.get_clearsky(expected.index, linke_turbidity=2.658197)
    return expected, cs
コード例 #22
0
ファイル: tracking.py プロジェクト: mikofski/pvlib-python
    def __init__(self, pvsystem=None, location=None, **kwargs):
        """Combine attributes of a PVSystem and a Location with kwargs.

        Attributes are merged with later sources winning: location
        attributes override pvsystem attributes, and explicit kwargs
        override both. The merged dict initializes both parent classes.
        """
        pv_attrs = {} if pvsystem is None else pvsystem.__dict__
        loc_attrs = {} if location is None else location.__dict__

        # dict unpacking keeps the same precedence as the original
        # list-concatenation merge: kwargs > location > pvsystem
        new_kwargs = {**pv_attrs, **loc_attrs, **kwargs}

        SingleAxisTracker.__init__(self, **new_kwargs)
        Location.__init__(self, **new_kwargs)
コード例 #23
0
def test_get_irradiance():
    """SingleAxisTracker.get_irradiance yields POA components; NaN at night."""
    system = tracking.SingleAxisTracker(max_angle=90, axis_tilt=30,
                                        axis_azimuth=180, gcr=2.0/7.0,
                                        backtrack=True)
    times = pd.date_range(start='20160101 1200-0700',
                          end='20160101 1800-0700', freq='6H')
    location = Location(latitude=32, longitude=-111)
    solar_position = location.get_solarposition(times)
    irrads = pd.DataFrame({'dni': [900, 0], 'ghi': [600, 0], 'dhi': [100, 0]},
                          index=times)
    zenith = solar_position['apparent_zenith']
    azimuth = solar_position['azimuth']

    # invalid warnings already generated in horizon test above,
    # no need to clutter test output here
    with np.errstate(invalid='ignore'):
        tracker_data = system.singleaxis(zenith, azimuth)

    # some invalid values in irradiance.py. not our problem here
    with np.errstate(invalid='ignore'):
        irradiance = system.get_irradiance(tracker_data['surface_tilt'],
                                           tracker_data['surface_azimuth'],
                                           zenith,
                                           azimuth,
                                           irrads['dni'],
                                           irrads['ghi'],
                                           irrads['dhi'])

    nan = np.nan
    expected = pd.DataFrame(
        [[961.80070, 815.94490, 145.85580, 135.32820, 10.52757492],
         [nan, nan, nan, nan, nan]],
        columns=['poa_global', 'poa_direct', 'poa_diffuse',
                 'poa_sky_diffuse', 'poa_ground_diffuse'],
        index=times)

    assert_frame_equal(irradiance, expected, check_less_precise=2)
コード例 #24
0
def cloudiness(InputFilePath):
    """Estimate cloud coverage from all-sky camera JPEG images.

    Implements a preliminary version of the All-Sky Cloud Algorithm (ASCA)
    based on per-pixel Sky Index / Brightness Index analysis after
    Letu et al. (2014), Applied Optics, Vol. 53, No. 31.

    Parameters
    ----------
    InputFilePath : str
        Directory containing the ``*.jpg`` all-sky images to process.

    Returns
    -------
    tuple
        ``(cloudiness_value, ASCAtime, cloudmasks, set_scale_factor)`` where
        ``cloudiness_value`` is a list of cloud-cover percentages (as
        formatted strings), ``ASCAtime`` a list of datetimes parsed from the
        file names, ``cloudmasks`` a list of int arrays (1 cloud, 0 sky,
        -1 outside the image circle), and ``set_scale_factor`` the scaling
        setting used.
    """
    # -*- coding: utf-8 -*-
    from PIL import Image, ImageDraw, ImageOps, ImageFont
    import math
    from pvlib.location import Location
    import matplotlib.dates as mdate
    import pvlib
    import pandas as pd
    import datetime
    import glob
    import numpy as np
    # time.clock was removed in Python 3.8; perf_counter is its replacement
    from time import perf_counter as clock
    import copy
    from Converter import convert

    #Information:
    #
    #Code written by Marcus Klingebiel, Max-Planck-Institute for Meteorology
    #E-Mail: [email protected]
    #
    #PLEASE ASK BEFORE SHARING THIS CODE!
    #
    #Preliminary version of the All-Sky Cloud Algorithms (ASCA)
    #The code is based on the analysis of every single pixel on a jpeg-Image.
    #The used Ski-Index and Brightness-Index base on Letu et al. (2014), Applied Optics, Vol. 53, No. 31.
    #
    #Marcus Klingebiel, March 2016

    #Code eddited by Tobias Machnitzki
    #Email: [email protected]
    print("Calculating Cloudcoverage")

    #--------------------Settings------------------------------------------------------------------------------------------

    debugger = False  #if true, the program will print a message after each step

    TXTFile = False  #if True, the program will generate a csv file with several information. Delimiter = ','

    #    imagefont_size = 20 #Sets the font size of everything written into the picture

    Radius_synop = False  #If True: not the whole sky will be used, but just the 60 degrees from the middle on (like the DWD does with cloud covering)

    Save_image = False  #If True: an image will be printed at output-location, where recognized clouds are collored.

    #    font = ImageFont.truetype("/home/tobias/anaconda3/lib/python3.5/site-packages/matplotlib/mpl-data/fonts/ttf/Vera.ttf",imagefont_size)    # Font

    set_scale_factor = 100  #this factor sets the acuracy of the program. By scaling down the image size the program gets faster but also its acuracy dercreases.
    #It needs to be between 1 and 100. If set 100, then the original size of the image will be used.
    #If set to 50 the image will be scaled down to half its size

    #---------------------Calcutlate the SI-parameter--------------------------------------------------------------------------------------
    #The Parameter gets calculated before the loop over all files starts, to save computing time.
    #To see how the function for the parameter was generated, see the documentation.

    size = 100
    parameter = np.zeros(size)
    for j in range(size):
        # 5th-order polynomial fit for the SI threshold as a function of
        # distance from the sun (see documentation)
        parameter[j] = (0 + j * 0.4424283716980435 -
                        pow(j, 2) * 0.06676211439554262 +
                        pow(j, 3) * 0.0026358061791573453 -
                        pow(j, 4) * 0.000029417130873311177 +
                        pow(j, 5) * 1.0292852149593944e-7) * 0.001

    #----------------------Read files------------------------------------------------------------------------------------------------------

    OutputPath = "/media/MPI/ASCA/images/s160521/out/"

    cloudiness_value = []
    ASCAtime = []
    cloudmasks = []

    for InputFile in sorted(glob.glob(InputFilePath + '/*.jpg')):

        #--------Get day and time------------
        # file names are assumed to end in ...YYMMDDHHMMSS*.jpg; the
        # timestamp is sliced out of a fixed position from the end
        if debugger:
            print("Getting day and time")

        date_str = InputFile[len(InputFile) - 19:len(InputFile) - 19 + 12]
        if debugger:
            print("Date_Str: " + date_str)
        Year_str = date_str[0:2]
        Month_str = date_str[2:4]
        Day_str = date_str[4:6]
        Hour_str = date_str[6:8]
        Minute_str = date_str[8:10]
        Second_str = date_str[10:12]

        Year = int(date_str[0:2])
        Month = int(date_str[2:4])
        Day = int(date_str[4:6])
        Hour = int(date_str[6:8])
        Minute = int(date_str[8:10])
        Second = int(date_str[10:12])

        #------------Calculate SZA--------------------------------------------------------------------------------------------------------------
        if debugger:
            print("Calculating SZA")

        tus = Location(
            13.164, -59.433, 'UTC', 70, 'BCO'
        )  #This is the location of the Cloud camera used for calculating the Position of the sun in the picture
        times = pd.date_range(start=datetime.datetime(Year + 2000, Month, Day,
                                                      Hour, Minute, Second),
                              end=datetime.datetime(Year + 2000, Month, Day,
                                                    Hour, Minute, Second),
                              freq='10s')
        times_loc = times.tz_localize(tus.pytz)
        pos = pvlib.solarposition.get_solarposition(times_loc,
                                                    tus.latitude,
                                                    tus.longitude,
                                                    method='nrel_numpy',
                                                    pressure=101325,
                                                    temperature=25)
        sza = float(pos.zenith[0])
        if debugger:
            print("sza=" + str(sza))

        # NOTE(review): only images with 84 < sza <= 85 are processed here —
        # confirm this narrow band is intended rather than e.g. sza < 85
        if (84 < sza <=
                85):  #The program will only process images made at daylight

            time1 = clock()
            azimuth = float(pos.azimuth[0])
            sza_orig = sza
            azi_orig = azimuth
            azimuth = azimuth + 190  #197 good
            if azimuth > 360:
                azimuth = azimuth - 360

    #------------Open csv-File-------------------------------------------------------------------------------------------------------------
            if debugger:
                print("Open csv-File")

            # NOTE(review): this file handle is never closed; header is
            # written once and TXTFile is cleared so it opens only once
            if TXTFile:
                f = open(
                    OutputPath + Year_str + Month_str + Day_str + '_' +
                    Hour_str + Minute_str + Second_str + '_ASCA.csv', 'w')
                f.write(
                    'Seconds_since_1970, UTC_Time, SZA_in_degree, Azimuth_in_degree, Cloudiness_in_percent, Cloudiness_in_oktas'
                    + '\n')
                TXTFile = False

    #---Read image and set some parameters-------------------------------------------------------------------------------------------------
            if debugger:
                print("Reading image and setting parameters")

            #------------rescale picture-------------------------------------------
            image = Image.open(InputFile)

            x_size_raw = image.size[0]
            y_size_raw = image.size[1]
            scale_factor = (set_scale_factor / 100.)
            NEW_SIZE = (x_size_raw * scale_factor, y_size_raw * scale_factor)
            # Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the same
            # filter under its canonical name
            image.thumbnail(NEW_SIZE, Image.LANCZOS)

            image = ImageOps.mirror(image)  #Mirror picture

            x_size = image.size[0]
            y_size = image.size[1]
            x_mittel = x_size / 2  # Detect center of the true image
            y_mittel = y_size / 2
            Radius = 900  #pixel    #  Set area for the true allsky image

            scale = x_size / 2592.

            #-------------convert image to an array and remove unnecessary part around true allsky image-----------------------------------------------------------------
            if debugger:
                print("Drawing circle around image and removing the rest")

            r = Radius * scale
            y, x = np.ogrid[-y_mittel:y_size - y_mittel,
                            -x_mittel:x_size - x_mittel]
            x = x + (15 * scale)  #move centerpoint manually
            y = y - (40 * scale)
            mask = x**2 + y**2 <= r**2  #make a circular boolean array which is false in the area outside the true allsky image

            # np.array (not asarray) owns its buffer and is writable;
            # setflags(write=True) on a PIL-backed view raises in modern numpy
            image_array = np.array(
                image, order='F'
            )  #array[x, y, color]; color: 0=red, 1=green, 2=blue
            image_array[:, :, :][~mask] = [
                0, 0, 0
            ]  #using the mask created before on that new made array

            if Radius_synop:
                mask = x**2 + y**2 <= (765 * scale)**2
                image_array[:, :, :][~mask] = [0, 0, 0]

            del x, y

            #------------Calculate position of sun on picture---------------------------------------------------------------------------------------
            if debugger:
                print("Calculating position of the sun on picture")

            sza = sza - 90
            if sza < 0:
                sza = sza * (-1)

            AzimutWinkel = ((2 * math.pi) / 360) * (azimuth - 90)
            sza = ((2 * math.pi) / 360) * sza
            x_sol_cen = x_mittel - (15 * scale)
            y_sol_cen = y_mittel + (40 * scale)
            RadiusBild = r
            sza_dist = RadiusBild * math.cos(sza)

            x = x_sol_cen - sza_dist * math.cos(AzimutWinkel)
            y = y_sol_cen - sza_dist * math.sin(AzimutWinkel)

            ###-----------Draw circle around position of sun-------------------------------------------------------------------------------------------
            if debugger:
                print("Drawing circle around position of sun")

            x_sol_cen = int(x)
            y_sol_cen = int(y)
            Radius_sol = 300 * scale
            Radius_sol_center = 250 * scale

            y, x = np.ogrid[-y_sol_cen:y_size - y_sol_cen,
                            -x_sol_cen:x_size - x_sol_cen]
            sol_mask = x**2 + y**2 <= Radius_sol**2
            sol_mask_cen = x**2 + y**2 <= Radius_sol_center**2
            sol_mask_cen1 = sol_mask_cen
            image_array[:, :, :][sol_mask_cen] = [0, 0, 0]

            ##-------Calculate Sky Index SI and Brightness Index BI------------Based on Letu et al. (2014)-------------------------------------------------
            if debugger:
                print("Calculating Sky Index SI and Brightness Index BI")

            image_array_f = image_array.astype(float)

            # SI = (blue - red) / (blue + red); black pixels give 0/0 -> NaN,
            # which is treated as sky (SI = 1)
            SI = ((image_array_f[:, :, 2]) -
                  (image_array_f[:, :, 0])) / (((image_array_f[:, :, 2]) +
                                                (image_array_f[:, :, 0])))
            where_are_NaNs = np.isnan(SI)
            SI[where_are_NaNs] = 1

            mask_sol1 = SI < 0.1
            Radius = 990 * scale
            sol_mask_double = x**2 + y**2 <= Radius**2
            mask_sol1 = np.logical_and(mask_sol1, ~sol_mask_double)
            image_array[:, :, :][mask_sol1] = [255, 0, 0]

            ###-------------Include area around the sun----------------------------------------------------------------------------------------------------
            if debugger:
                print("Including area around the sun")

            y, x = np.ogrid[-y_sol_cen:y_size - y_sol_cen,
                            -x_sol_cen:x_size - x_sol_cen]
            sol_mask = x**2 + y**2 <= Radius_sol**2
            sol_mask_cen = x**2 + y**2 <= Radius_sol_center**2
            sol_mask_cen = np.logical_and(sol_mask_cen, sol_mask)

            Radius_sol = size * 100 * 2
            sol_mask = x**2 + y**2 <= Radius_sol**2
            mask2 = np.logical_and(~sol_mask_cen, sol_mask)

            image_array_c = copy.deepcopy(
                image_array
            )  #duplicating array: one for counting one for printing a colored image

            time3 = clock()

            # grow annuli around the sun and apply the distance-dependent SI
            # threshold from `parameter` in each ring
            for j in range(size):
                Radius_sol = j * 10 * scale
                sol_mask = (x * x) + (y * y) <= Radius_sol * Radius_sol
                mask2 = np.logical_and(~sol_mask_cen, sol_mask)
                sol_mask_cen = np.logical_or(sol_mask, sol_mask_cen)

                mask3 = SI < parameter[j]
                mask3 = np.logical_and(mask2, mask3)
                image_array_c[mask3] = [255, 0, 0]
                image_array[mask3] = [255, 300 - 3 * j, 0]

            time4 = clock()
            ##---------Count red pixel(clouds) and blue-green pixel(sky)-------------------------------------------------------------------------------------------
            if debugger:
                print("Counting red pixel for sky and blue for clouds")

            c_mask = np.logical_and(~sol_mask_cen1, mask)
            c_array = (image_array_c[:, :, 0] + image_array_c[:, :, 1] +
                       image_array_c[:, :, 2])  #array just for the counting
            # cloud pixels were painted pure red -> channel sum == 255
            Count1 = np.shape(np.where(c_array == 255))[1]
            Count2 = np.shape(np.where(c_mask == True))[1]

            CloudinessPercent = (100 / float(Count2) * float(Count1))
            CloudinessSynop = int(round(8 * (float(Count1) / float(Count2))))

            image = Image.fromarray(image_array.astype(np.uint8))

            #----------Mirror Image-----------------------------
            image = ImageOps.mirror(image)  #Mirror Image back
            #---------Add Text-----------------------------------
            if debugger:
                print("Adding text")

            sza = "{:5.1f}".format(sza_orig)
            azimuth = "{:5.1f}".format(azi_orig)
            CloudinessPercent = "{:5.1f}".format(CloudinessPercent)

            #-------------Save picture--------------------------------------------------
            if Save_image:
                if debugger:
                    print("saving picture")

                image = convert(InputFile, image, OutputPath)
                image.save(OutputPath + Year_str + Month_str + Day_str + '_' +
                           Hour_str + Minute_str + Second_str + '_ASCA.jpg')

            time2 = clock()
            time = time2 - time1
            cloudiness_value.append(CloudinessPercent)
            ASCAtime.append((datetime.datetime(Year + 2000, Month, Day, Hour,
                                               Minute, Second)))

            # 1 = cloud, 0 = sky, -1 = outside the usable image circle
            cloudmask = (c_array == 255).astype(int)
            cloudmask[np.where(c_mask == False)] = -1
            # BUG FIX: the original assigned the flip to a misspelled name
            # ('clodmask'), silently discarding it; the mirrored mask is
            # flipped back before being stored, matching the mirrored image.
            cloudmask = np.fliplr(cloudmask)
            cloudmasks.append(cloudmask)

    return cloudiness_value, ASCAtime, cloudmasks, set_scale_factor
コード例 #25
0
ファイル: test_pvsystem.py プロジェクト: Lamaf/pvlib-python
import datetime
import inspect
import os

import numpy as np
import pandas as pd

from nose.tools import assert_equals, assert_almost_equals

from pvlib import tmy
from pvlib import pvsystem
from pvlib import clearsky
from pvlib import irradiance
from pvlib import atmosphere
from pvlib import solarposition
from pvlib.location import Location

# Shared fixtures: Tucson site and a one-day, 1-minute time index.
tus = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
times = pd.date_range(start=datetime.datetime(2014, 1, 1),
                      end=datetime.datetime(2014, 1, 2),
                      freq='1Min')
ephem_data = solarposition.get_solarposition(times, tus, method='pyephem')
irrad_data = clearsky.ineichen(times, tus, solarposition_method='pyephem')
aoi = irradiance.aoi(0, 0, ephem_data['apparent_zenith'],
                     ephem_data['apparent_azimuth'])
am = atmosphere.relativeairmass(ephem_data.apparent_zenith)

# 'os' and 'inspect' were used here but never imported; imports added above.
pvlib_abspath = os.path.dirname(os.path.abspath(inspect.getfile(tmy)))

tmy3_testfile = os.path.join(pvlib_abspath, 'data', '703165TY.csv')
tmy2_testfile = os.path.join(pvlib_abspath, 'data', '12839.tm2')

tmy3_data, tmy3_metadata = tmy.readtmy3(tmy3_testfile)
コード例 #26
0
def reindl(lat, lon, times, ghi, extra_i, zenith):
    """
    Estimate dhi, dni and the clearness index kt from ghi, extraterrestrial
    irradiance and the true solar zenith angle, using the diffuse fraction
    correlations proposed by Reindl et al. [1].

    Parameters
    -----------
        lat, lon: float
        site coordinates in decimal degrees

        times: DatetimeIndex
        timestamps matching the irradiance series

        ghi:  numeric pd.Series or sequence
        global horizontal irradiance [W/m^2]

        zenith: numeric pd.Series or sequence
        real solar zenith angle (not apparent) in [°]

        extra_i: numeric pd.Series or sequence
        extraterrestial irradiance [W/m^2] == top-of-the-atmosphere irradiance (TOA) == SWTDN (Merra-2)

    Returns
    -------
    data : OrderedDict or DataFrame
        Contains the following keys/columns:

            * ``dni``: the modeled direct normal irradiance in W/m^2.
            * ``dhi``: the modeled diffuse horizontal irradiance in
              W/m^2.
            * ``kt``: Ratio of global to extraterrestrial irradiance
              on a horizontal plane.

    References
    -----------
        [1] Reindl et al. (1990): Diffuse fraction correlations

    """
    cos_zen = tools.cosd(zenith)

    # Extraterrestrial irradiance projected onto the horizontal plane.
    i0_h = extra_i * cos_zen

    # Clearness index: clip negatives, map undefined (night-time) values to 0.
    kt = np.maximum(ghi / i0_h, 0)
    kt.fillna(0, inplace=True)

    # The three Reindl correlation branches, each evaluated once.
    low_fit = 1.02 - 0.254 * kt + 0.0123 * cos_zen
    mid_fit = 1.4 - 1.794 * kt + 0.177 * cos_zen
    high_fit = 0.486 * kt + 0.182 * cos_zen

    # Diffuse fraction: zero outside the kt bands, branch value inside.
    df = np.where((kt > 0) & (kt <= 0.3), low_fit, 0.0)
    df = np.where((kt > 0.3) & (kt <= 0.78) & (mid_fit >= 0.1), mid_fit, df)
    df = np.where((kt > 0.78) & (high_fit >= 0.1), high_fit, df)

    # Discard physically implausible combinations.
    df = np.where((df < 0.9) & (kt < 0.2), 0, df)
    df = np.where((df > 0.8) & (kt > 0.6), 0, df)
    df = np.where(df > 1, 0, df)
    df = np.where(ghi - extra_i >= 0, 0, df)

    dhi = df * ghi

    # Derive dni from ghi/dhi, bounded by the site's clear-sky dni.
    dni = irradiance.dni(ghi, dhi, zenith,
                         clearsky_dni=Location(lat, lon).get_clearsky(times).dni,
                         zenith_threshold_for_zero_dni=88.0,
                         clearsky_tolerance=1.1,
                         zenith_threshold_for_clearsky_limit=64)

    data = OrderedDict([('dni', dni), ('dhi', dhi), ('kt', kt)])

    # Mirror pvlib conventions: Series inputs produce a DataFrame.
    if isinstance(dni, pd.Series):
        data = pd.DataFrame(data)

    return data
# In[511]:


### CONFIGURE RUNS
run_train = False # Disables training & processing of train set; Set it to True for the first time to create a model
#test_location = "Bondville" #Folder name
test_location = "Boulder" #Folder name
#test_location = "Desert_Rock" #Folder name
#test_location = "Fort_Peck" #Folder name
#test_location = "Goodwin_Creek" #Folder name
#test_location = "Penn_State" #Folder name
#test_location = "Sioux_Falls" #Folder name

# All_locations
#bvl = Location(40.1134,-88.3695, 'US/Central', 217.932, 'Bondville')
bvl = Location(40.0150,-105.2705, 'US/Mountain', 1655.064, 'Boulder')
#bvl = Location(36.621,-116.043, 'US/Pacific', 1010.1072, 'Desert Rock')
# bvl = Location(48,-106.449, 'US/Mountain', 630.0216, 'Fort Peck')
# bvl = Location(34.2487,-89.8925, 'US/Central', 98, 'Goodwin Creek')
# bvl = Location(40.798,-77.859, 'US/Eastern', 351.74, 'Penn State')
# bvl = Location(43.544,-96.73, 'US/Central', 448.086, 'Sioux Falls')



test_year = "2009"


# TEST year 2009
# pd.DatetimeIndex(start=..., end=...) was removed in pandas 1.0;
# pd.date_range is the supported way to build a fixed-frequency index.
times = pd.date_range(start='2009-01-01', end='2010-01-01', freq='1min',
                      tz=bvl.tz)  # 12 months
#  TEST year 2015
#times = pd.date_range(start='2015-01-01', end='2016-01-01', freq='1min', tz=bvl.tz)   # 12 months
コード例 #28
0
def find_clearsky_poa(df,
                      lat,
                      lon,
                      irradiance_poa_key='irradiance_poa_o_###',
                      mounting='fixed',
                      tilt=0,
                      azimuth=180,
                      altitude=0):
    """Flag clear-sky periods in a measured plane-of-array (POA) series.

    Parameters
    ----------
    df : DataFrame
        Time-indexed data containing the measured POA column named by
        ``irradiance_poa_key``. Modified in place: columns ``csghi``,
        ``csdhi``, ``csdni``, ``cspoa`` and ``poa`` are added.
    lat, lon : float
        Site coordinates in decimal degrees.
    irradiance_poa_key : str, default 'irradiance_poa_o_###'
        Name of the measured POA irradiance column in ``df``.
    mounting : {'fixed', 'tracking'}, default 'fixed'
        Array mounting type (case-insensitive).
    tilt, azimuth : numeric
        Surface tilt/azimuth for fixed mounts, or axis tilt/azimuth for
        single-axis trackers.
    altitude : numeric, default 0
        Site altitude in meters.

    Returns
    -------
    Series
        Aligned to ``df.index``; truthy where the POA measurement is
        detected as clear-sky.

    Raises
    ------
    ValueError
        If ``mounting`` is neither 'fixed' nor 'tracking'.
    """
    loc = Location(lat, lon, altitude=altitude)

    # Modeled clear-sky irradiance components for the site.
    CS = loc.get_clearsky(df.index)

    df['csghi'] = CS.ghi
    df['csdhi'] = CS.dhi
    df['csdni'] = CS.dni

    mounting_key = mounting.lower()
    if mounting_key == "fixed":
        sun = get_solarposition(df.index, lat, lon)

        fixedpoa = get_total_irradiance(tilt, azimuth, sun.zenith, sun.azimuth,
                                        CS.dni, CS.ghi, CS.dhi)

        df['cspoa'] = fixedpoa.poa_global
    elif mounting_key == "tracking":
        sun = get_solarposition(df.index, lat, lon)

        # tilt/azimuth act as axis_tilt/axis_azimuth here
        # (defaults correspond to axis_tilt=0 and axis_azimuth=180)
        tracker_data = singleaxis(sun.apparent_zenith,
                                  sun.azimuth,
                                  axis_tilt=tilt,
                                  axis_azimuth=azimuth,
                                  max_angle=50,
                                  backtrack=True,
                                  gcr=0.35)

        track = get_total_irradiance(tracker_data['surface_tilt'],
                                     tracker_data['surface_azimuth'],
                                     sun.zenith, sun.azimuth, CS.dni, CS.ghi,
                                     CS.dhi)

        df['cspoa'] = track.poa_global
    else:
        # Previously an unsupported mounting fell through silently and the
        # function crashed later on the missing 'cspoa' column; fail fast.
        raise ValueError("mounting must be 'fixed' or 'tracking', got %r"
                         % mounting)

    # The clear-sky POA is assumed to share df's datetime index, with
    # daylight savings / time shifts already corrected. Rescale the
    # measurement so its 98th percentile matches the modeled one; the
    # detect_clearsky inputs below were tuned for 15-minute POA data.
    df['poa'] = df[irradiance_poa_key] / df[irradiance_poa_key].quantile(
        0.98) * df.cspoa.quantile(0.98)

    # inputs for detect_clearsky
    measured = df.poa.copy()
    clear = df.cspoa.copy()
    dur = 60
    lower_line_length = -41.416
    upper_line_length = 77.789
    var_diff = .00745
    mean_diff = 80
    max_diff = 90
    slope_dev = 3

    is_clear_results = detect_clearsky(measured.values,
                                       clear.values,
                                       df.index,
                                       dur,
                                       mean_diff,
                                       max_diff,
                                       lower_line_length,
                                       upper_line_length,
                                       var_diff,
                                       slope_dev,
                                       return_components=True)

    clearSeries = pd.Series(index=df.index, data=is_clear_results[0])

    # NOTE(review): reindexing on the identical index is a no-op; a forward
    # fill over gaps may have been intended — confirm against callers.
    clearSeries = clearSeries.reindex(index=df.index, method='ffill', limit=3)

    return clearSeries
### CONFIGURE RUNS
run_train = True  # Disables training & processing of train set; Set it to True for the first time to create a model
#test_location = "Bondville" #Folder name
#test_location = "Boulder" #Folder name
#test_location = "Desert_Rock" #Folder name
test_location = "Fort_Peck"  #Folder name
#test_location = "Goodwin_Creek" #Folder name
#test_location = "Penn_State" #Folder name
#test_location = "Sioux_Falls" #Folder name

# All_locations
#bvl = Location(40.1134,-88.3695, 'US/Central', 217.932, 'Bondville')
#bvl = Location(40.0150,-105.2705, 'US/Mountain', 1655.064, 'Boulder')
#bvl = Location(36.621,-116.043, 'US/Pacific', 1010.1072, 'Desert Rock')
bvl = Location(48, -106.449, 'US/Mountain', 630.0216, 'Fort Peck')
# bvl = Location(34.2487,-89.8925, 'US/Central', 98, 'Goodwin Creek')
# bvl = Location(40.798,-77.859, 'US/Eastern', 351.74, 'Penn State')
# bvl = Location(43.544,-96.73, 'US/Central', 448.086, 'Sioux Falls')

test_year = "2009"

# TEST year 2009
# pd.DatetimeIndex(start=..., end=...) was removed in pandas 1.0;
# pd.date_range is the supported way to build a fixed-frequency index.
times = pd.date_range(start='2009-01-01',
                      end='2010-01-01',
                      freq='1min',
                      tz=bvl.tz)  # 12 months
#  TEST year 2015
#times = pd.date_range(start='2015-01-01', end='2016-01-01', freq='1min', tz=bvl.tz)   # 12 months
# TEST year 2016
#times = pd.date_range(start='2016-01-01', end='2017-01-01', freq='1min', tz=bvl.tz)   # 12 months
コード例 #30
0
def solar_noon(lat, lon, elev):
	"""Calculate solar noon for a site at the given lat, lon and elevation.

	NOTE(review): the timezone is hard-coded to 'America/Denver' and the
	site name to 'GYE'; this snippet also appears truncated — only the
	Location construction is visible, with no further computation or
	return statement.
	"""
	#calculates solar noon given a lat, lon, and elevation
	#all these functions lag in performance because we must instantiate the object for location and times every time....
	current_location = Location(lat, lon, 'America/Denver', elev, 'GYE') 
コード例 #31
0
def calc_ghi(datetime, latitudes, longitudes, wndw):
    '''
    Calculate the GHI at a certain datetime for certain combinations of latitudes
    and longitudes. The function first calculates the clear-sky index at the
    datetime. Then, the matrix indices that belong to the coordinates of the
    stations are calculated, which is used to select the right clear-sky index.
    Then, the function loops over the locations in the dictionary of latitudes
    and calculates the clear-sky irradiance and then the GHI measured by the
    satellite. The GHI is then combined with the measured GHI, which finally
    results in a data frame with one row and columns: Az_loc, Elev_loc,
    G_loc, G_sat_loc.
    Arguments:
    - datetime: pandas datetime Timestamp.
    - lats and lons: dictionaries containing the coordinates of sites in
      the coordinate reference system WGS84 (standard coordinate system).
    - wndw: scalar with the training window length (for validation purposes)
    Output:
    - Data frame with one row and columns: Az_loc, Elev_loc, G_loc, G_sat_loc.
    '''
    # NOTE(review): the parameter name 'datetime' shadows the datetime module.
    # Calculate clear-sky index on date
    csi = calc_clearskyIndex(datetime, wndw)  # Note that the output is (y,x)
    if csi is None:  # Check whether azimuth is lower than 85, skips if not.
        return
    else:
        # Find indices closest to lats and lons
        indices = find_indices(latitudes, longitudes)
        ys = indices['y'].to_list()  # Get the y-coordinates
        xs = indices['x'].to_list()  # Get the x-coordinates
        # Clear-sky index per station, paired with its location name.
        lst_of_csi = csi[ys, xs]
        d = {'csi': lst_of_csi, 'Location': list(latitudes.keys())}
        csi_at_loc = pd.DataFrame(d)
        # I need to use a rolling window for the clear sky irradiance because the image is
        # taken in an interval of 15 minutes while the clear-sky irradiance is instantaneous.
        # Current time:
        time_idx = pd.date_range(start=datetime,
                                 freq='15min',
                                 periods=1,
                                 tz='UTC').round('15min')
        # Time interval of the image:
        time_int = pd.date_range(end=datetime,
                                 freq='1min',
                                 periods=15,
                                 tz='UTC')  #.round('15min')
        lst_of_dfs = []
        for loc in latitudes.keys():  # Loop over the locations
            lat, lon = latitudes[loc], longitudes[loc]
            current_loc = Location(lat, lon, 'UTC', 0)
            # Average the 1-minute clear-sky GHI over the 15-minute image
            # interval to match the satellite's integration period.
            clearsky = current_loc.get_clearsky(time_int).resample(
                '15min', closed='right',
                label='right').mean()  # DataFrame with two rows.
            clearsky = clearsky.loc[time_idx][
                'ghi']  # DataFrame with one row, take ghi
            # Satellite-derived GHI = clear-sky GHI * clear-sky index.
            ghi_sat = clearsky.values * csi_at_loc.loc[csi_at_loc['Location']
                                                       == loc]['csi'].values
            # Read the station specific columns:
            ghi_m_loc = ghi_m.filter(
                regex=(loc))  # ghi_m is a .txt file with measurements
            # NOTE(review): ghi_m is a module-level table defined elsewhere.
            ghi_loc = ghi_m_loc.loc[time_idx]
            ghi_loc['G_sat_{}'.format(loc)] = ghi_sat
            ghi_loc['csi_{}'.format(loc)] = csi_at_loc.loc[
                csi_at_loc['Location'] == loc]['csi'].values
            lst_of_dfs.append(ghi_loc)
        df = pd.concat(lst_of_dfs, axis=1)
        return (df)
コード例 #32
0
def fc_ghi_ps(issuetime, latitudes, longitudes, wndw, K):
    '''
    GHI persistence forecasts starting at a certain issue time for certain combinations
    of latitudes and longitudes. The function computes the clear sky index at the
    forecast issue time and the clear sky irradiance at the forecast valid time.
    The GHI is then combined with the measured GHI, which finally
    results in a data frame with K rows and columns: Az_loc, Elev_loc,
    G_loc, G_ps_loc.
    Arguments:
    - issuetime: pandas datetime Timestamp.
    - lats and lons: dictionaries containing the coordinates of sites in
      the coordinate reference system WGS84 (standard coordinate system).
    - wndw: scalar with the training window length.
    - K: vector of integers representing the forecast horizons (1,...,K).
    Output:
    - Data frame with K rows and columns: Az_loc, Elev_loc, G_loc, G_ps_loc.
      The frame is also written to FORECAST_PATH_PS as a tab-separated file.
    '''
    checked_date = define_trailing_and_target(issuetime, wndw)
    if checked_date is None:  # Check whether azimuth is lower than 85, skips if not.
        return
    else:
        lst_of_dfs_outer = []
        #issuetime = pd.date_range(start=issuetime, freq='15min', periods=1, tz='UTC').round('15min')
        for k in K:  # one iteration per forecast horizon
            # Forecast valid time:
            valid_time = issuetime + pd.Timedelta(15 * k, unit='min')
            valid_time_as_rng = pd.date_range(start=valid_time,
                                              freq='15min',
                                              periods=1,
                                              tz='UTC').round('15min')
            # Time interval of the image:
            time_int = pd.date_range(end=valid_time,
                                     freq='1min',
                                     periods=15,
                                     tz='UTC')  #.round('15min')
            # Convert issue time to range for get_clearsky
            issuetime_as_rng = pd.date_range(start=issuetime,
                                             freq='15min',
                                             periods=1,
                                             tz='UTC').round('15min')

            lst_of_dfs_inner = []
            for loc in latitudes.keys():  # Loop over the locations
                lat, lon = latitudes[loc], longitudes[loc]
                current_loc = Location(lat, lon, 'UTC', 0)
                # Read the station specific columns:
                # NOTE(review): ghi_m is a module-level table defined elsewhere.
                ghi_m_loc = ghi_m.filter(regex=(loc))
                # Get the clear sky irradiance at valid time to compute GHI_ps:
                # (1-minute values averaged over the 15-minute interval)
                clearsky_valid = current_loc.get_clearsky(time_int).resample(
                    '15min', closed='right', label='right').mean()
                clearsky_valid = clearsky_valid.loc[
                    valid_time_as_rng]  # DataFrame with one row, take ghi
                # Get the clear sky irradiance at issue time to compute persistence:
                clearsky_issue = current_loc.get_clearsky(issuetime_as_rng)
                # Get the measured data at the issue time
                ghi_m_loc_issue = ghi_m_loc.loc[issuetime_as_rng]
                # Compute persistence forecast:
                # GHI_ps = (measured / clear-sky at issue) * clear-sky at valid
                ghi_ps_fc = ghi_m_loc_issue['G_{}'.format(
                    loc)].values / clearsky_issue[
                        'ghi'].values * clearsky_valid['ghi'].values
                ghi_loc = ghi_m_loc.loc[valid_time_as_rng]
                ghi_loc['G_ps_{}'.format(loc)] = ghi_ps_fc
                lst_of_dfs_inner.append(ghi_loc)
            df = pd.concat(
                lst_of_dfs_inner,
                axis=1)  # DF with 1 row and column for each location
            lst_of_dfs_outer.append(df)
        fc = pd.concat(lst_of_dfs_outer, axis=0).round(
            2)  # DF with K rows and column for each location
        # Persist the forecast frame, named after the issue time.
        fname = os.path.join(FORECAST_PATH_PS,
                             issuetime.strftime("%Y%m%dT%H%M") + ".txt")
        fc.to_csv(fname, sep="\t")
コード例 #33
0
def fc_ghi(issuetime, latitudes, longitudes, wndw, K):
    '''
    Forecast the GHI starting at a certain issue time for certain combinations
    of latitudes and longitudes. The function first generates a forecast for
    1,...,K forecast horizons of the clear-sky index. Then, the matrix indices
    that belong to the coordinates of the stations are calculated, which is used
    to select the right clear-sky index.
    Then, the function loops over the locations in the dictionary of latitudes
    and calculates the clear-sky irradiance and then the GHI measured by the
    satellite. The GHI is then combined with the measured GHI, which finally
    results in a data frame with K rows and columns: Az_loc, Elev_loc,
    G_loc, G_sat_loc.
    Arguments:
    - issuetime: pandas datetime Timestamp.
    - lats and lons: dictionaries containing the coordinates of sites in
      the coordinate reference system WGS84 (standard coordinate system).
    - wndw: scalar with the training window length
    - K: vector of integers representing the forecast horizons (1,...,K).
    Output:
    - Data frame with K rows and columns: Az_loc, Elev_loc, G_loc, G_sat_loc.
      The frame is also written to FORECAST_PATH as a tab-separated file.
    '''
    # Calculate clear-sky index on date
    fcs = fc_clearskyIndex(issuetime, wndw, K)  # list of K numpy arrays
    if fcs is None:  # Check whether azimuth is lower than 85, skips if not.
        return
    else:
        # Find indices closest to lats and lons
        indices = find_indices(latitudes, longitudes)
        ys = indices['y'].to_list()  # Get the y-coordinates
        xs = indices['x'].to_list()  # Get the x-coordinates
        lst_of_dfs_outer = []
        for k in K:  # one iteration per forecast horizon
            lst_of_csi = fcs[k - 1][ys,
                                    xs]  # Take the forecasts at the indices
            d = {'csi': lst_of_csi, 'Location': list(latitudes.keys())}
            csi_at_loc = pd.DataFrame(d)
            # I need to use a rolling window for the clear sky irradiance because the image is
            # taken in an interval of 15 minutes while the clear-sky irradiance is instantaneous.
            # Valid time of forecast:
            valid_time = issuetime + pd.Timedelta(15 * k, unit='min')
            time_idx = pd.date_range(start=valid_time,
                                     freq='15min',
                                     periods=1,
                                     tz='UTC').round('15min')
            # Time interval of the image:
            time_int = pd.date_range(end=valid_time,
                                     freq='1min',
                                     periods=15,
                                     tz='UTC')  #.round('15min')
            lst_of_dfs_inner = []
            for loc in latitudes.keys():  # Loop over the locations
                lat, lon = latitudes[loc], longitudes[loc]
                current_loc = Location(lat, lon, 'UTC', 0)
                # Average the 1-minute clear-sky GHI over the 15-minute
                # image interval before selecting the valid time.
                clearsky = current_loc.get_clearsky(time_int).resample(
                    '15min', closed='right',
                    label='right').mean()  # DataFrame with two rows.
                clearsky = clearsky.loc[time_idx][
                    'ghi']  # DataFrame with one row, take ghi
                # Satellite-derived GHI = clear-sky GHI * forecast csi.
                ghi_sat = clearsky.values * csi_at_loc.loc[
                    csi_at_loc['Location'] == loc]['csi'].values
                # Read the station specific columns:
                # NOTE(review): ghi_m is a module-level table defined elsewhere.
                ghi_m_loc = ghi_m.filter(
                    regex=(loc))  # ghi_m is a .txt file with measurements
                ghi_loc = ghi_m_loc.loc[time_idx]
                ghi_loc['G_sat_{}'.format(loc)] = ghi_sat
                #ghi_loc['csi_{}'.format(loc)] = csi_at_loc.loc[csi_at_loc['Location'] == loc]['csi'].values
                lst_of_dfs_inner.append(ghi_loc)
            df = pd.concat(
                lst_of_dfs_inner,
                axis=1)  # DF with 1 row and column for each location
            lst_of_dfs_outer.append(df)
        fc = pd.concat(lst_of_dfs_outer, axis=0).round(
            2)  # DF with K rows and column for each location
        #dir_name = os.path.join(FORECAST_PATH, issuetime.strftime("%Y/%m/%d/%H/%M"))
        #if not os.path.exists(dir_name):
        #    os.makedirs(dir_name)
        #fc.to_csv(os.path.join(dir_name,"fc.txt"), sep="\t")
        # Persist the forecast frame, named after the issue time.
        fname = os.path.join(FORECAST_PATH,
                             issuetime.strftime("%Y%m%dT%H%M") + ".txt")
        fc.to_csv(fname, sep="\t")
コード例 #34
0
import pytest
import pandas as pd  # pd is used below (date_range) but was never imported

from numpy.testing import assert_almost_equal, assert_allclose

# NOTE(review): pandas.util.testing is deprecated; modern pandas exposes
# these helpers under pandas.testing.
from pandas.util.testing import assert_frame_equal, assert_series_equal

from pvlib.location import Location
from pvlib import clearsky
from pvlib import solarposition
from pvlib import irradiance
from pvlib import atmosphere

from conftest import requires_ephem, requires_numba, needs_numpy_1_10

# setup times and location to be tested.
tus = Location(32.2, -111, 'US/Arizona', 700)

# must include night values
times = pd.date_range(start='20140624', freq='6H', periods=4, tz=tus.tz)

ephem_data = solarposition.get_solarposition(times,
                                             tus.latitude,
                                             tus.longitude,
                                             method='nrel_numpy')

irrad_data = tus.get_clearsky(times, model='ineichen', linke_turbidity=3)

dni_et = irradiance.extraradiation(times.dayofyear)

ghi = irrad_data['ghi']
コード例 #35
0
import datetime  # used below but not imported in this snippet

import pandas as pd  # used below but not imported in this snippet
from pandas.util.testing import assert_series_equal, assert_frame_equal
from numpy.testing import assert_allclose

from pvlib import tmy
from pvlib import pvsystem
from pvlib import clearsky
from pvlib import irradiance
from pvlib import atmosphere
from pvlib import solarposition
from pvlib.location import Location

from conftest import needs_numpy_1_10, requires_scipy

# Shared fixtures: Tucson site and a one-day, 1-minute time index.
latitude = 32.2
longitude = -111
tus = Location(latitude, longitude, 'US/Arizona', 700, 'Tucson')
times = pd.date_range(start=datetime.datetime(2014, 1, 1),
                      end=datetime.datetime(2014, 1, 2),
                      freq='1Min')
ephem_data = solarposition.get_solarposition(times,
                                             latitude=latitude,
                                             longitude=longitude,
                                             method='nrel_numpy')
am = atmosphere.relativeairmass(ephem_data.apparent_zenith)
irrad_data = clearsky.ineichen(ephem_data['apparent_zenith'],
                               am,
                               linke_turbidity=3)
aoi = irradiance.aoi(0, 0, ephem_data['apparent_zenith'],
                     ephem_data['azimuth'])
meta = {
コード例 #36
0
ファイル: test_irradiance.py プロジェクト: dpete2008/Sandia
import pytest
import pandas as pd  # pd is used below (date_range) but was never imported

from numpy.testing import assert_almost_equal, assert_allclose

# NOTE(review): pandas.util.testing is deprecated; modern pandas exposes
# these helpers under pandas.testing.
from pandas.util.testing import assert_frame_equal, assert_series_equal

from pvlib.location import Location
from pvlib import clearsky
from pvlib import solarposition
from pvlib import irradiance
from pvlib import atmosphere

from conftest import requires_ephem, requires_numba, needs_numpy_1_10

# setup times and location to be tested.
tus = Location(32.2, -111, 'US/Arizona', 700)

# must include night values
times = pd.date_range(start='20140624', freq='6H', periods=4, tz=tus.tz)

ephem_data = solarposition.get_solarposition(
    times, tus.latitude, tus.longitude, method='nrel_numpy')

irrad_data = tus.get_clearsky(times, model='ineichen', linke_turbidity=3)

dni_et = irradiance.extraradiation(times.dayofyear)

ghi = irrad_data['ghi']


# setup for et rad test. put it here for readability
コード例 #37
0
ファイル: test_location.py プロジェクト: caskeep/pvlib-python
def test_location_print_pytz():
    """str(Location) should render name, coordinates, tz and altitude."""
    tus = Location(32.2, -111, aztz, 700, 'Tucson')
    expected_str = 'Tucson: latitude=32.2, longitude=-111, tz=US/Arizona, altitude=700'
    # str() is the idiomatic spelling of tus.__str__()
    assert str(tus) == expected_str
コード例 #38
0
def test_location_tz(tz):
    """Constructing a Location with the given tz must not raise."""
    loc = Location(32.2, -111, tz)
コード例 #39
0
class ForecastModel(object):
    """
    An object for querying and holding forecast model information for
    use within the pvlib library.

    Simplifies use of siphon library on a THREDDS server.

    Parameters
    ----------
    model_type: string
        UNIDATA category in which the model is located.
    model_name: string
        Name of the UNIDATA forecast model.
    set_type: string
        Model dataset type.

    Attributes
    ----------
    access_url: string
        URL specifying the dataset from which data will be retrieved.
    base_tds_url : string
        The top level server address
    catalog_url : string
        The url path of the catalog to parse.
    data: pd.DataFrame
        Data returned from the query.
    data_format: string
        Format of the forecast data being requested from UNIDATA.
    dataset: Dataset
        Object containing information used to access forecast data.
    dataframe_variables: list
        Model variables that are present in the data.
    datasets_list: list
        List of all available datasets.
    fm_models: Dataset
        TDSCatalog object containing all available
        forecast models from UNIDATA.
    fm_models_list: list
        List of all available forecast models from UNIDATA.
    latitude: list
        A list of floats containing latitude values.
    location: Location
        A pvlib Location object containing geographic quantities.
    longitude: list
        A list of floats containing longitude values.
    lbox: boolean
        Indicates the use of a location bounding box.
    ncss: NCSS object
        NCSS
    model_name: string
        Name of the UNIDATA forecast model.
    model: Dataset
        A dictionary of Dataset object, whose keys are the name of the
        dataset's name.
    model_url: string
        The url path of the dataset to parse.
    modelvariables: list
        Common variable names that correspond to queryvariables.
    query: NCSS query object
        NCSS object used to complete the forecast data retrieval.
    queryvariables: list
        Variables that are used to query the THREDDS Data Server.
    time: DatetimeIndex
        Time range.
    variables: dict
        Defines the variables to obtain from the weather
        model and how they should be renamed to common variable names.
    units: dict
        Dictionary containing the units of the standard variables
        and the model specific variables.
    vert_level: float or integer
        Vertical altitude for query data.
    """

    access_url_key = 'NetcdfSubset'
    catalog_url = 'http://thredds.ucar.edu/thredds/catalog.xml'
    base_tds_url = catalog_url.split('/thredds/')[0]
    data_format = 'netcdf'

    units = {
        'temp_air': 'C',
        'wind_speed': 'm/s',
        'ghi': 'W/m^2',
        'ghi_raw': 'W/m^2',
        'dni': 'W/m^2',
        'dhi': 'W/m^2',
        'total_clouds': '%',
        'low_clouds': '%',
        'mid_clouds': '%',
        'high_clouds': '%'
    }

    def __init__(self, model_type, model_name, set_type, vert_level=None):
        """Record the model identifiers; no network access happens here."""
        self.model_type, self.model_name = model_type, model_name
        self.set_type, self.vert_level = set_type, vert_level
        # connect_to_catalog() flips this once the THREDDS catalog is reached
        self.connected = False

    def connect_to_catalog(self):
        """Connect to the THREDDS catalog and resolve this model's dataset.

        Populates ``catalog``, ``fm_models``, ``fm_models_list``, ``model``
        and ``datasets_list``, then calls :meth:`set_dataset`. Sets
        ``connected`` to True on success.

        Raises
        ------
        ParseError
            If the model's catalog reference cannot be parsed.
        HTTPError
            If the model catalog cannot be fetched (after one retry).
        """
        self.catalog = TDSCatalog(self.catalog_url)
        self.fm_models = TDSCatalog(
            self.catalog.catalog_refs[self.model_type].href)
        self.fm_models_list = sorted(list(self.fm_models.catalog_refs.keys()))

        try:
            model_url = self.fm_models.catalog_refs[self.model_name].href
        except ParseError as err:
            # chain the original failure so the real cause stays visible
            raise ParseError(
                self.model_name + ' model may be unavailable.') from err

        try:
            self.model = TDSCatalog(model_url)
        except HTTPError:
            # transient server errors are common; retry once before giving up
            try:
                self.model = TDSCatalog(model_url)
            except HTTPError as err:
                raise HTTPError(
                    self.model_name + ' model may be unavailable.') from err

        self.datasets_list = list(self.model.datasets.keys())
        self.set_dataset()
        self.connected = True

    def __repr__(self):
        # e.g. 'GFS, best' — model name plus dataset type
        return '{}, {}'.format(self.model_name, self.set_type)

    def set_dataset(self):
        '''
        Retrieves the designated dataset, creates the NCSS object, and
        creates an NCSS query object.

        Raises
        ------
        ValueError
            If ``set_type`` is not one of 'best', 'latest' or 'full'.
            (Previously an unknown set_type silently left ``dataset``
            unset, causing an AttributeError two lines later.)
        '''
        keys = list(self.model.datasets.keys())
        # dataset names look like 'Best ...', 'Latest ...', 'Full ...';
        # the first word (lowercased) identifies the dataset type
        labels = [item.split()[0].lower() for item in keys]
        if self.set_type not in ('best', 'latest', 'full'):
            raise ValueError(
                "set_type must be 'best', 'latest' or 'full', got %r"
                % self.set_type)
        self.dataset = self.model.datasets[keys[labels.index(self.set_type)]]

        self.access_url = self.dataset.access_urls[self.access_url_key]
        self.ncss = NCSS(self.access_url)
        self.query = self.ncss.query()

    def set_query_latlon(self):
        '''
        Sets the NCSS query location: a bounding box when both latitude
        and longitude are lists, otherwise a single point.
        '''
        lat, lon = self.latitude, self.longitude
        # lbox records whether a location bounding box is in use
        self.lbox = isinstance(lon, list) and isinstance(lat, list)
        if self.lbox:
            # west, east, south, north
            self.query.lonlat_box(lon[0], lon[1], lat[0], lat[1])
        else:
            self.query.lonlat_point(lon, lat)

    def set_location(self, time, latitude, longitude):
        '''
        Sets the location for the query.

        Parameters
        ----------
        time: datetime or DatetimeIndex
            Time range of the query; its timezone (if any) is propagated
            to the Location.
        latitude: float
            Latitude of the query location.
        longitude: float
            Longitude of the query location.
        '''
        # datetime objects carry tzinfo; a DatetimeIndex carries tz
        tzinfo = time.tzinfo if isinstance(time, datetime.datetime) else time.tz

        # only pass tz through when one was actually supplied
        kwargs = {} if tzinfo is None else {'tz': tzinfo}
        self.location = Location(latitude, longitude, **kwargs)

    def get_data(self,
                 latitude,
                 longitude,
                 start,
                 end,
                 vert_level=None,
                 query_variables=None,
                 close_netcdf_data=True):
        """
        Submits a query to the UNIDATA servers using Siphon NCSS and
        converts the netcdf data to a pandas DataFrame.

        Parameters
        ----------
        latitude: float
            The latitude value.
        longitude: float
            The longitude value.
        start: datetime or timestamp
            The start time.
        end: datetime or timestamp
            The end time.
        vert_level: None, float or integer, default None
            Vertical altitude of interest.
        query_variables: None or list, default None
            If None, uses self.variables.
        close_netcdf_data: bool, default True
            Controls if the temporary netcdf data file should be closed.
            Set to False to access the raw data.

        Returns
        -------
        forecast_data : DataFrame
            column names are the weather model's variable names.
        """

        # lazily establish the THREDDS connection on first use
        if not self.connected:
            self.connect_to_catalog()

        # a vert_level argument overrides the instance default
        if vert_level is not None:
            self.vert_level = vert_level

        if query_variables is None:
            self.query_variables = list(self.variables.values())
        else:
            self.query_variables = query_variables

        # stash the query geometry/time on the instance, then build the query
        self.latitude = latitude
        self.longitude = longitude
        self.set_query_latlon()  # modifies self.query
        self.set_location(start, latitude, longitude)

        self.start = start
        self.end = end
        self.query.time_range(self.start, self.end)

        if self.vert_level is not None:
            self.query.vertical_level(self.vert_level)

        self.query.variables(*self.query_variables)
        self.query.accept(self.data_format)

        # execute the query against the server
        self.netcdf_data = self.ncss.get_data(self.query)

        # might be better to go to xarray here so that we can handle
        # higher dimensional data for more advanced applications
        self.data = self._netcdf2pandas(self.netcdf_data, self.query_variables,
                                        self.start, self.end)

        if close_netcdf_data:
            self.netcdf_data.close()

        return self.data

    def process_data(self, data, **kwargs):
        """
        Defines the steps needed to convert raw forecast data
        into processed forecast data. Subclasses typically extend this
        method and call it via super(); the base implementation only
        renames columns to the standard pvlib variable names.

        Parameters
        ----------
        data: DataFrame
            Raw forecast data

        Returns
        -------
        data: DataFrame
            Processed forecast data.
        """
        return self.rename(data)

    def get_processed_data(self, *args, **kwargs):
        """
        Get and process forecast data in one call.

        Parameters
        ----------
        *args: positional arguments
            Passed to get_data
        **kwargs: keyword arguments
            Passed to get_data and process_data

        Returns
        -------
        data: DataFrame
            Processed forecast data
        """
        raw_data = self.get_data(*args, **kwargs)
        return self.process_data(raw_data, **kwargs)

    def rename(self, data, variables=None):
        """
        Renames the columns from model-specific variable names to the
        standard pvlib names.

        Parameters
        ----------
        data: DataFrame
        variables: None or dict, default None
            Mapping of standard name -> model variable name.
            If None, uses self.variables.

        Returns
        -------
        data: DataFrame
            Renamed data.
        """
        mapping = self.variables if variables is None else variables
        # invert the mapping: the DataFrame carries model names as columns
        inverse = {model_name: std_name
                   for std_name, model_name in mapping.items()}
        return data.rename(columns=inverse)

    def _netcdf2pandas(self, netcdf_data, query_variables, start, end):
        """
        Transforms data from netcdf to pandas DataFrame.

        Parameters
        ----------
        netcdf_data: netcdf
            Data returned from UNIDATA NCSS query.
        query_variables: list
            The variables requested.
        start: Timestamp
            The start time
        end: Timestamp
            The end time

        Returns
        -------
        pd.DataFrame
        """
        # set self.time
        try:
            time_var = 'time'
            self.set_time(netcdf_data.variables[time_var])
        except KeyError:
            # which model does this dumb thing?
            time_var = 'time1'
            self.set_time(netcdf_data.variables[time_var])

        data_dict = {}
        for key, data in netcdf_data.variables.items():
            # if accounts for possibility of extra variable returned
            if key not in query_variables:
                continue
            # drop singleton dims (point queries return (time, 1, 1) arrays)
            squeezed = data[:].squeeze()
            if squeezed.ndim == 1:
                # plain time series -> one column
                data_dict[key] = squeezed
            elif squeezed.ndim == 2:
                # one column per vertical level, suffixed _0, _1, ...
                for num, data_level in enumerate(squeezed.T):
                    data_dict[key + '_' + str(num)] = data_level
            else:
                raise ValueError('cannot parse ndim > 2')

        data = pd.DataFrame(data_dict, index=self.time)
        # sometimes data is returned as hours since T0
        # where T0 is before start. Then the hours between
        # T0 and start are added *after* end. So sort and slice
        # to remove the garbage
        data = data.sort_index().loc[start:end]
        return data

    def set_time(self, time):
        '''
        Converts the netcdf time variable into a timezone-localized
        pandas.DatetimeIndex and stores it on ``self.time``.

        Parameters
        ----------
        time: netcdf
            Contains time information.
        '''
        # num2date decodes "<units> since <epoch>" values into datetimes;
        # localize the index to the query location's timezone
        times = num2date(time[:].squeeze(), time.units)
        self.time = pd.DatetimeIndex(pd.Series(times), tz=self.location.tz)

    def cloud_cover_to_ghi_linear(self,
                                  cloud_cover,
                                  ghi_clear,
                                  offset=35,
                                  **kwargs):
        """
        Convert cloud cover to GHI using a linear relationship.

        0% cloud cover returns ghi_clear.

        100% cloud cover returns offset*ghi_clear.

        Parameters
        ----------
        cloud_cover: numeric
            Cloud cover in %.
        ghi_clear: numeric
            GHI under clear sky conditions.
        offset: numeric, default 35
            Determines the minimum GHI, as a percentage of ghi_clear.
        kwargs
            Not used.

        Returns
        -------
        ghi: numeric
            Estimated GHI.

        References
        ----------
        Larson et. al. "Day-ahead forecasting of solar power output from
        photovoltaic plants in the American Southwest" Renewable Energy
        91, 11-20 (2016).
        """
        # convert percentages to fractions
        min_ghi_frac = offset / 100.
        cloud_frac = cloud_cover / 100.
        # interpolate linearly between min_ghi_frac (overcast) and 1 (clear)
        return (min_ghi_frac + (1 - min_ghi_frac) * (1 - cloud_frac)) * ghi_clear

    def cloud_cover_to_irradiance_clearsky_scaling(self,
                                                   cloud_cover,
                                                   method='linear',
                                                   **kwargs):
        """
        Estimates irradiance from cloud cover in the following steps:

        1. Determine clear sky GHI using Ineichen model and
           climatological turbidity.
        2. Estimate cloudy sky GHI using a function of
           cloud_cover e.g.
           :py:meth:`~ForecastModel.cloud_cover_to_ghi_linear`
        3. Estimate cloudy sky DNI using the DISC model.
        4. Calculate DHI from GHI and DNI.

        Parameters
        ----------
        cloud_cover : Series
            Cloud cover in %.
        method : str, default 'linear'
            Method for converting cloud cover to GHI.
            'linear' is currently the only option.
        **kwargs
            Passed to the method that does the conversion

        Returns
        -------
        irrads : DataFrame
            Estimated GHI, DNI, and DHI.
        """
        solpos = self.location.get_solarposition(cloud_cover.index)
        cs = self.location.get_clearsky(cloud_cover.index,
                                        model='ineichen',
                                        solar_position=solpos)

        method = method.lower()
        if method == 'linear':
            ghi = self.cloud_cover_to_ghi_linear(cloud_cover, cs['ghi'],
                                                 **kwargs)
        else:
            raise ValueError('invalid method argument')

        # closure relation: GHI = DHI + DNI * cos(zenith)
        dni = disc(ghi, solpos['zenith'], cloud_cover.index)['dni']
        dhi = ghi - dni * np.cos(np.radians(solpos['zenith']))

        # fillna(0) zeroes the nighttime NaNs
        irrads = pd.DataFrame({'ghi': ghi, 'dni': dni, 'dhi': dhi}).fillna(0)
        return irrads

    def cloud_cover_to_transmittance_linear(self,
                                            cloud_cover,
                                            offset=0.75,
                                            **kwargs):
        """
        Convert cloud cover to atmospheric transmittance using a linear
        model.

        0% cloud cover returns offset.

        100% cloud cover returns 0.

        Parameters
        ----------
        cloud_cover : numeric
            Cloud cover in %.
        offset : numeric, default 0.75
            Determines the maximum transmittance.
        kwargs
            Not used.

        Returns
        -------
        transmittance : numeric
            Atmospheric transmittance, 0 to offset.
        """
        # fraction of the sky that is clear, scaled by the clear-sky maximum
        clear_fraction = (100.0 - cloud_cover) / 100.0
        return clear_fraction * offset

    def cloud_cover_to_irradiance_liujordan(self, cloud_cover, **kwargs):
        """
        Estimates irradiance from cloud cover in the following steps:

        1. Determine transmittance using a function of cloud cover e.g.
           :py:meth:`~ForecastModel.cloud_cover_to_transmittance_linear`
        2. Calculate GHI, DNI, DHI using the
           :py:func:`pvlib.irradiance.liujordan` model

        Parameters
        ----------
        cloud_cover : Series
            Cloud cover in %.
        **kwargs
            Passed to cloud_cover_to_transmittance_linear.

        Returns
        -------
        irradiance : DataFrame
            Columns include ghi, dni, dhi
        """
        # in principle, get_solarposition could use the forecast
        # pressure, temp, etc., but the cloud cover forecast is not
        # accurate enough to justify using these minor corrections
        solar_position = self.location.get_solarposition(cloud_cover.index)
        dni_extra = get_extra_radiation(cloud_cover.index)
        airmass = self.location.get_airmass(cloud_cover.index)

        transmittance = self.cloud_cover_to_transmittance_linear(
            cloud_cover, **kwargs)

        irrads = liujordan(solar_position['apparent_zenith'],
                           transmittance,
                           airmass['airmass_absolute'],
                           dni_extra=dni_extra)
        # zero the nighttime NaNs
        irrads = irrads.fillna(0)

        return irrads

    def cloud_cover_to_irradiance(self,
                                  cloud_cover,
                                  how='clearsky_scaling',
                                  **kwargs):
        """
        Convert cloud cover to irradiance. A wrapper method.

        Parameters
        ----------
        cloud_cover : Series
        how : str, default 'clearsky_scaling'
            Selects the method for conversion. Can be one of
            clearsky_scaling or liujordan.
        **kwargs
            Passed to the selected method.

        Returns
        -------
        irradiance : DataFrame
            Columns include ghi, dni, dhi
        """
        # dispatch table keyed on the normalized method name
        converters = {
            'clearsky_scaling':
                self.cloud_cover_to_irradiance_clearsky_scaling,
            'liujordan':
                self.cloud_cover_to_irradiance_liujordan,
        }
        try:
            converter = converters[how.lower()]
        except KeyError:
            raise ValueError('invalid how argument')
        return converter(cloud_cover, **kwargs)

    def kelvin_to_celsius(self, temperature):
        """
        Converts Kelvin to celsius.

        Parameters
        ----------
        temperature: numeric
            Temperature in Kelvin.

        Returns
        -------
        temperature: numeric
            Temperature in degrees Celsius.
        """
        zero_celsius_in_kelvin = 273.15
        return temperature - zero_celsius_in_kelvin

    def isobaric_to_ambient_temperature(self, data):
        """
        Calculates ambient temperature from isobaric temperature.

        Parameters
        ----------
        data: DataFrame
            Must contain columns pressure (Pa), temperature_iso (K),
            temperature_dew_iso (K).

        Returns
        -------
        temperature : Series
            Temperature in K
        """
        pressure_hpa = data['pressure'] / 100.0  # Pa -> hPa
        temp_iso = data['temperature_iso']
        dew_point_c = data['temperature_dew_iso'] - 273.15  # K -> C

        # saturation water vapor pressure (hPa), Magnus-type formula
        sat_vapor_pressure = 6.11 * 10**((7.5 * dew_point_c) /
                                         (dew_point_c + 273.3))

        # saturation water vapor mixing ratio (kg water / kg dry air)
        mixing_ratio = 0.622 * (sat_vapor_pressure /
                                (pressure_hpa - sat_vapor_pressure))

        # latent-heat correction term: (L / cp) * w
        return temp_iso - ((2.501 * 10.**6) / 1005.7) * mixing_ratio

    def uv_to_speed(self, data):
        """
        Computes wind speed magnitude from the u and v wind components.

        Parameters
        ----------
        data : DataFrame
            Must contain the columns 'wind_speed_u' and 'wind_speed_v'.

        Returns
        -------
        wind_speed : Series
        """
        u = data['wind_speed_u']
        v = data['wind_speed_v']
        # Euclidean norm of the two horizontal components
        return np.sqrt(u**2 + v**2)

    def gust_to_speed(self, data, scaling=1 / 1.4):
        """
        Computes standard wind speed from gust.
        Very approximate and location dependent.

        Parameters
        ----------
        data : DataFrame
            Must contain the column 'wind_speed_gust'.
        scaling : numeric, default 1/1.4
            Multiplier applied to the gust speed.

        Returns
        -------
        wind_speed : Series
        """
        return data['wind_speed_gust'] * scaling
コード例 #40
0
def test_location_invalid_tz():
    # an unrecognized timezone string must raise pytz's UnknownTimeZoneError
    with pytest.raises(UnknownTimeZoneError):
        Location(32.2, -111, 'invalid')
コード例 #41
0
from nose.tools import raises, assert_almost_equals
from numpy.testing import assert_almost_equal

from pandas.util.testing import assert_frame_equal

from pvlib.location import Location
from pvlib import clearsky
from pvlib import solarposition
from pvlib import irradiance
from pvlib import atmosphere

from . import requires_ephem

# setup times and location to be tested.
tus = Location(32.2, -111, 'US/Arizona', 700)

# must include night values
times = pd.date_range(start='20140624', end='20140626', freq='1Min', tz=tus.tz)

# solar position via the vectorized NREL SPA implementation
ephem_data = solarposition.get_solarposition(times,
                                             tus.latitude,
                                             tus.longitude,
                                             method='nrel_numpy')

# clear-sky irradiance with a fixed Linke turbidity of 3
# NOTE(review): this uses an old clearsky.ineichen signature -- confirm
# it matches the pvlib version pinned by this test module
irrad_data = clearsky.ineichen(times,
                               tus.latitude,
                               tus.longitude,
                               altitude=tus.altitude,
                               linke_turbidity=3,
                               solarposition_method='nrel_numpy')
コード例 #42
0
def test_location_invalid_tz_type():
    # a tz argument of an unsupported type (here a list) must raise TypeError
    with pytest.raises(TypeError):
        Location(32.2, -111, [5])
# In[511]:

### CONFIGURE RUNS
run_train = True  # Disables training & processing of train set; Set it to True for the first time to create a model
#test_location = "Bondville" #Folder name
#test_location = "Boulder" #Folder name
test_location = "Desert_Rock"  #Folder name
#test_location = "Fort_Peck" #Folder name
#test_location = "Goodwin_Creek" #Folder name
#test_location = "Penn_State" #Folder name
#test_location = "Sioux_Falls" #Folder name

# All_locations
#bvl = Location(40.1134,-88.3695, 'US/Central', 217.932, 'Bondville')
#bvl = Location(40.0150,-105.2705, 'US/Mountain', 1655.064, 'Boulder')
bvl = Location(36.621, -116.043, 'US/Pacific', 1010.1072, 'Desert Rock')
# bvl = Location(48,-106.449, 'US/Mountain', 630.0216, 'Fort Peck')
# bvl = Location(34.2487,-89.8925, 'US/Central', 98, 'Goodwin Creek')
# bvl = Location(40.798,-77.859, 'US/Eastern', 351.74, 'Penn State')
# bvl = Location(43.544,-96.73, 'US/Central', 448.086, 'Sioux Falls')

test_year = "2009"

# TEST year 2009
# pd.DatetimeIndex(start=..., end=..., freq=...) was removed in pandas 0.25;
# pd.date_range is the supported constructor for a fixed-frequency index.
times = pd.date_range(start='2009-01-01',
                      end='2010-01-01',
                      freq='1min',
                      tz=bvl.tz)  # 12 months
#  TEST year 2015
#times = pd.date_range(start='2015-01-01', end='2016-01-01', freq='1min',tz=bvl.tz)   # 12 months
# TEST year 2016
コード例 #44
0
def golden_mst():
    # presumably a pytest fixture: Location for Golden, CO pinned to fixed
    # MST (no DST) -- decorator not visible here, confirm at the call site
    return Location(39.742476, -105.1786, 'MST', 1830.14)
コード例 #45
0
def test_get_airmass_valueerror(times):
    """get_airmass must raise ValueError for an unknown model name."""
    tus = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    with pytest.raises(ValueError):
        # call for the side effect only; the previous unused assignment to
        # a local named `clearsky` also shadowed the pvlib.clearsky module
        tus.get_airmass(times, model='invalid_model')
コード例 #46
0
def location():
    # presumably a pytest fixture: minimal Location with only coordinates
    # and altitude set -- decorator not visible here, confirm at call site
    return Location(32.2, -111, altitude=700)
コード例 #47
0
#test_location = "Bondville" #Folder name
#test_location = "Boulder" #Folder name
#test_location = "Desert_Rock" #Folder name
#test_location = "Fort_Peck" #Folder name
#test_location = "Goodwin_Creek" #Folder name
#test_location = "Penn_State" #Folder name
test_location = "Sioux_Falls" #Folder name

# All_locations
#bvl = Location(40.1134,-88.3695, 'US/Central', 217.932, 'Bondville')
#bvl = Location(40.0150,-105.2705, 'US/Mountain', 1655.064, 'Boulder')
#bvl = Location(36.621,-116.043, 'US/Pacific', 1010.1072, 'Desert Rock')
#bvl = Location(48,-106.449, 'US/Mountain', 630.0216, 'Fort Peck')
#bvl = Location(34.2487,-89.8925, 'US/Central', 98, 'Goodwin Creek')
#bvl = Location(40.798,-77.859, 'US/Eastern', 351.74, 'Penn State')
bvl = Location(43.544,-96.73, 'US/Central', 448.086, 'Sioux Falls')



test_year = "2015"


# pd.DatetimeIndex(start=..., end=..., freq=...) was removed in pandas 0.25;
# pd.date_range is the supported constructor for a fixed-frequency index.
# TEST year 2009
#times = pd.date_range(start='2009-01-01', end='2010-01-01', freq='1min',tz=bvl.tz)   # 12 months
#  TEST year 2015
times = pd.date_range(start='2015-01-01', end='2016-01-01', freq='1min', tz=bvl.tz)   # 12 months
# TEST year 2016
#times = pd.date_range(start='2016-01-01', end='2017-01-01', freq='1min',tz=bvl.tz)   # 12 months
# Test year 2017
#times = pd.date_range(start='2017-01-01', end='2018-01-01', freq='1min',tz=bvl.tz)   # 12 months
コード例 #48
0
def test_location_required():
    # smoke test: Location is constructible from latitude and longitude alone
    Location(32.2, -111)
コード例 #49
0
DNI = np.zeros(N)
albedo = np.zeros(N)
T = np.zeros(N)

# copy the EPW weather records into flat numpy arrays
for i in range(N):
    wd = epw.weatherdata[i]
    # print() call: the original used the Python 2 print statement,
    # which is a SyntaxError under Python 3
    print(wd.year, wd.month, wd.day, wd.hour, wd.minute,
          wd.dry_bulb_temperature)
    I_hor[i] = wd.global_horizontal_radiation
    I_d_hor[i] = wd.diffuse_horizontal_radiation
    DNI[i] = wd.direct_normal_radiation
    albedo[i] = wd.albedo
    T[i] = wd.dry_bulb_temperature
    I_0[i] = wd.extraterrestrial_direct_normal_radiation

# use the pvlib library to define the characteristics of the location:
tus = Location(latitude, longitude, 'Africa/Johannesburg', 700, 'Johannesburg')

# Define the pandas index (arbitrarily set to 2015):
first = epw.weatherdata[0]
last = epw.weatherdata[-1]
times = pd.date_range(start=datetime.datetime(2015, first.month, first.day),
                      end=datetime.datetime(2015, last.month, last.day, 23, 59,
                                            59),
                      freq='h')

# Compute the sun position at each hour of the year:
ephem_data = solarposition.get_solarposition(times,
                                             latitude=latitude,
                                             longitude=longitude,
                                             method='nrel_numpy')
コード例 #50
0
def test_location_all():
    # smoke test: Location accepts latitude, longitude, tz, altitude, name
    Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
コード例 #51
0
ファイル: forecast.py プロジェクト: dpete2008/Sandia
class ForecastModel(object):
    """
    An object for querying and holding forecast model information for
    use within the pvlib library.

    Simplifies use of siphon library on a THREDDS server.

    Parameters
    ----------
    model_type: string
        UNIDATA category in which the model is located.
    model_name: string
        Name of the UNIDATA forecast model.
    set_type: string
        Model dataset type.

    Attributes
    ----------
    access_url: string
        URL specifying the dataset from data will be retrieved.
    base_tds_url : string
        The top level server address
    catalog_url : string
        The url path of the catalog to parse.
    data: pd.DataFrame
        Data returned from the query.
    data_format: string
        Format of the forecast data being requested from UNIDATA.
    dataset: Dataset
        Object containing information used to access forecast data.
    dataframe_variables: list
        Model variables that are present in the data.
    datasets_list: list
        List of all available datasets.
    fm_models: Dataset
        TDSCatalog object containing all available
        forecast models from UNIDATA.
    fm_models_list: list
        List of all available forecast models from UNIDATA.
    latitude: list
        A list of floats containing latitude values.
    location: Location
        A pvlib Location object containing geographic quantities.
    longitude: list
        A list of floats containing longitude values.
    lbox: boolean
        Indicates the use of a location bounding box.
    ncss: NCSS object
        NCSS
    model_name: string
        Name of the UNIDATA forecast model.
    model: Dataset
        A dictionary of Dataset object, whose keys are the name of the
        dataset's name.
    model_url: string
        The url path of the dataset to parse.
    modelvariables: list
        Common variable names that correspond to queryvariables.
    query: NCSS query object
        NCSS object used to complete the forecast data retrival.
    queryvariables: list
        Variables that are used to query the THREDDS Data Server.
    time: DatetimeIndex
        Time range.
    variables: dict
        Defines the variables to obtain from the weather
        model and how they should be renamed to common variable names.
    units: dict
        Dictionary containing the units of the standard variables
        and the model specific variables.
    vert_level: float or integer
        Vertical altitude for query data.
    """

    # key into dataset.access_urls for the NetCDF Subset Service endpoint
    access_url_key = 'NetcdfSubset'
    catalog_url = 'http://thredds.ucar.edu/thredds/catalog.xml'
    # scheme + host of the THREDDS server (text before '/thredds/')
    base_tds_url = catalog_url.split('/thredds/')[0]
    # format requested from the NCSS query
    data_format = 'netcdf'
    # default vertical level for queries; overridable via get_data
    vert_level = 100000

    # units of the standard (renamed) output variables
    units = {
        'temp_air': 'C',
        'wind_speed': 'm/s',
        'ghi': 'W/m^2',
        'ghi_raw': 'W/m^2',
        'dni': 'W/m^2',
        'dhi': 'W/m^2',
        'total_clouds': '%',
        'low_clouds': '%',
        'mid_clouds': '%',
        'high_clouds': '%'}

    def __init__(self, model_type, model_name, set_type):
        """
        Connect to the THREDDS catalog and locate the requested model.

        Performs network I/O: downloads the top-level catalog, the
        model-type catalog, and the model's own catalog, then selects a
        dataset via set_dataset().
        """
        self.model_type = model_type
        self.model_name = model_name
        self.set_type = set_type
        self.catalog = TDSCatalog(self.catalog_url)
        self.fm_models = TDSCatalog(self.catalog.catalog_refs[model_type].href)
        self.fm_models_list = sorted(list(self.fm_models.catalog_refs.keys()))

        try:
            model_url = self.fm_models.catalog_refs[model_name].href
        except ParseError:
            raise ParseError(self.model_name + ' model may be unavailable.')

        try:
            self.model = TDSCatalog(model_url)
        except HTTPError:
            # single retry -- presumably to ride out transient server
            # errors from UNIDATA; confirm intent
            try:
                self.model = TDSCatalog(model_url)
            except HTTPError:
                raise HTTPError(self.model_name + ' model may be unavailable.')

        self.datasets_list = list(self.model.datasets.keys())
        self.set_dataset()

    def __repr__(self):
        # human-readable summary: "<model_name>, <set_type>"
        return '{}, {}'.format(self.model_name, self.set_type)

    def set_dataset(self):
        '''
        Retrieves the designated dataset, creates NCSS object, and
        creates a NCSS query object.
        '''

        keys = list(self.model.datasets.keys())
        # first word of each dataset label, lowercased (e.g. 'best',
        # 'latest', 'full'), used to match against set_type
        labels = [item.split()[0].lower() for item in keys]
        if self.set_type == 'best':
            self.dataset = self.model.datasets[keys[labels.index('best')]]
        elif self.set_type == 'latest':
            self.dataset = self.model.datasets[keys[labels.index('latest')]]
        elif self.set_type == 'full':
            self.dataset = self.model.datasets[keys[labels.index('full')]]
        # NOTE(review): an unrecognized set_type silently leaves
        # self.dataset unset, making the next line raise AttributeError

        self.access_url = self.dataset.access_urls[self.access_url_key]
        self.ncss = NCSS(self.access_url)
        self.query = self.ncss.query()

    def set_query_latlon(self):
        '''
        Sets the NCSS query location latitude and longitude.

        Lists of coordinates select a bounding box; scalars select a
        single point.
        '''

        if (isinstance(self.longitude, list) and
                isinstance(self.latitude, list)):
            self.lbox = True
            # BUG FIX: siphon's lonlat_box takes (west, east, south, north),
            # i.e. longitudes first -- the previous call passed latitudes
            # as west/east and longitudes as south/north.
            # west, east, south, north
            self.query.lonlat_box(self.longitude[0], self.longitude[1],
                                  self.latitude[0], self.latitude[1])
        else:
            self.lbox = False
            self.query.lonlat_point(self.longitude, self.latitude)

    def set_location(self, time, latitude, longitude):
        '''
        Sets ``self.location`` to a pvlib Location for the query point.

        Parameters
        ----------
        time: datetime or DatetimeIndex
            Time range of the query; only its timezone is used here.
        latitude: float
            Latitude of the query point.
        longitude: float
            Longitude of the query point.
        '''
        # a datetime carries tzinfo; a DatetimeIndex carries tz
        tzinfo = (time.tzinfo if isinstance(time, datetime.datetime)
                  else time.tz)
        if tzinfo is None:
            self.location = Location(latitude, longitude)
        else:
            self.location = Location(latitude, longitude, tz=tzinfo)

    def get_data(self, latitude, longitude, start, end,
                 vert_level=None, query_variables=None,
                 close_netcdf_data=True):
        """
        Submits a query to the UNIDATA servers using Siphon NCSS and
        converts the netcdf data to a pandas DataFrame.

        Performs network I/O and mutates several instance attributes
        (latitude, longitude, start, end, query, netcdf_data, data).

        Parameters
        ----------
        latitude: float
            The latitude value.
        longitude: float
            The longitude value.
        start: datetime or timestamp
            The start time.
        end: datetime or timestamp
            The end time.
        vert_level: None, float or integer
            Vertical altitude of interest.
        query_variables: None or list
            If None, uses self.variables.
        close_netcdf_data: bool
            Controls if the temporary netcdf data file should be closed.
            Set to False to access the raw data.

        Returns
        -------
        forecast_data : DataFrame
            column names are the weather model's variable names.
        """
        if vert_level is not None:
            self.vert_level = vert_level

        if query_variables is None:
            self.query_variables = list(self.variables.values())
        else:
            self.query_variables = query_variables

        self.latitude = latitude
        self.longitude = longitude
        self.set_query_latlon()  # modifies self.query
        # location must be set before _netcdf2pandas, which reads its tz
        self.set_location(start, latitude, longitude)

        self.start = start
        self.end = end
        self.query.time_range(self.start, self.end)

        self.query.vertical_level(self.vert_level)
        self.query.variables(*self.query_variables)
        self.query.accept(self.data_format)

        # network call: downloads the subset as a temporary netcdf file
        self.netcdf_data = self.ncss.get_data(self.query)

        # might be better to go to xarray here so that we can handle
        # higher dimensional data for more advanced applications
        self.data = self._netcdf2pandas(self.netcdf_data, self.query_variables)

        if close_netcdf_data:
            self.netcdf_data.close()

        return self.data

    def process_data(self, data, **kwargs):
        """
        Defines the steps needed to convert raw forecast data
        into processed forecast data. Subclasses typically extend this
        method and call it via super(); the base implementation only
        renames columns to the standard pvlib variable names.

        Parameters
        ----------
        data: DataFrame
            Raw forecast data

        Returns
        -------
        data: DataFrame
            Processed forecast data.
        """
        return self.rename(data)

    def get_processed_data(self, *args, **kwargs):
        """
        Get and process forecast data in one call.

        Parameters
        ----------
        *args: positional arguments
            Passed to get_data
        **kwargs: keyword arguments
            Passed to get_data and process_data

        Returns
        -------
        data: DataFrame
            Processed forecast data
        """
        raw_data = self.get_data(*args, **kwargs)
        return self.process_data(raw_data, **kwargs)

    def rename(self, data, variables=None):
        """
        Renames the columns from model-specific variable names to the
        standard pvlib names.

        Parameters
        ----------
        data: DataFrame
        variables: None or dict
            Mapping of standard name -> model variable name.
            If None, uses self.variables.

        Returns
        -------
        data: DataFrame
            Renamed data.
        """
        mapping = self.variables if variables is None else variables
        # invert the mapping: the DataFrame carries model names as columns
        inverse = {model_name: std_name
                   for std_name, model_name in mapping.items()}
        return data.rename(columns=inverse)

    def _netcdf2pandas(self, netcdf_data, query_variables):
        """
        Transforms data from netcdf to pandas DataFrame.

        Parameters
        ----------
        netcdf_data: netcdf
            Data returned from UNIDATA NCSS query.
        query_variables: list
            The variables requested.

        Returns
        -------
        pd.DataFrame
        """
        # set self.time
        try:
            time_var = 'time'
            self.set_time(netcdf_data.variables[time_var])
        except KeyError:
            # which model does this dumb thing?
            time_var = 'time1'
            self.set_time(netcdf_data.variables[time_var])

        # NOTE(review): assumes every requested variable squeezes to 1-D;
        # multi-level (2-D) variables are not handled here -- confirm
        # against the models actually queried
        data_dict = {key: data[:].squeeze() for key, data in
                     netcdf_data.variables.items() if key in query_variables}

        return pd.DataFrame(data_dict, index=self.time)

    def set_time(self, time):
        '''
        Converts the netcdf time variable into a timezone-localized
        pandas.DatetimeIndex and stores it on ``self.time``.

        Parameters
        ----------
        time: netcdf
            Contains time information.
        '''
        # num2date decodes "<units> since <epoch>" values into datetimes;
        # localize the index to the query location's timezone
        times = num2date(time[:].squeeze(), time.units)
        self.time = pd.DatetimeIndex(pd.Series(times), tz=self.location.tz)

    def cloud_cover_to_ghi_linear(self, cloud_cover, ghi_clear, offset=35,
                                  **kwargs):
        """
        Convert cloud cover to GHI using a linear relationship.

        0% cloud cover returns ghi_clear.

        100% cloud cover returns offset*ghi_clear.

        Parameters
        ----------
        cloud_cover: numeric
            Cloud cover in %.
        ghi_clear: numeric
            GHI under clear sky conditions.
        offset: numeric
            Determines the minimum GHI, as a percentage of ghi_clear.
        kwargs
            Not used.

        Returns
        -------
        ghi: numeric
            Estimated GHI.

        References
        ----------
        Larson et. al. "Day-ahead forecasting of solar power output from
        photovoltaic plants in the American Southwest" Renewable Energy
        91, 11-20 (2016).
        """
        # convert percentages to fractions
        min_ghi_frac = offset / 100.
        cloud_frac = cloud_cover / 100.
        # interpolate linearly between min_ghi_frac (overcast) and 1 (clear)
        return (min_ghi_frac + (1 - min_ghi_frac) * (1 - cloud_frac)) * ghi_clear

    def cloud_cover_to_irradiance_clearsky_scaling(self, cloud_cover,
                                                   method='linear',
                                                   **kwargs):
        """
        Estimates irradiance from cloud cover in the following steps:

        1. Determine clear sky GHI using Ineichen model and
           climatological turbidity.
        2. Estimate cloudy sky GHI using a function of
           cloud_cover e.g.
           :py:meth:`~ForecastModel.cloud_cover_to_ghi_linear`
        3. Estimate cloudy sky DNI using the DISC model.
        4. Calculate DHI from GHI and DNI.

        Parameters
        ----------
        cloud_cover : Series
            Cloud cover in %.
        method : str
            Method for converting cloud cover to GHI.
            'linear' is currently the only option.
        **kwargs
            Passed to the method that does the conversion

        Returns
        -------
        irrads : DataFrame
            Estimated GHI, DNI, and DHI.
        """
        solpos = self.location.get_solarposition(cloud_cover.index)
        cs = self.location.get_clearsky(cloud_cover.index, model='ineichen',
                                        solar_position=solpos)

        method = method.lower()
        if method == 'linear':
            ghi = self.cloud_cover_to_ghi_linear(cloud_cover, cs['ghi'],
                                                 **kwargs)
        else:
            raise ValueError('invalid method argument')

        # closure relation: GHI = DHI + DNI * cos(zenith)
        dni = disc(ghi, solpos['zenith'], cloud_cover.index)['dni']
        dhi = ghi - dni * np.cos(np.radians(solpos['zenith']))

        # fillna(0) zeroes the nighttime NaNs
        irrads = pd.DataFrame({'ghi': ghi, 'dni': dni, 'dhi': dhi}).fillna(0)
        return irrads

    def cloud_cover_to_transmittance_linear(self, cloud_cover, offset=0.75,
                                            **kwargs):
        """
        Convert cloud cover to atmospheric transmittance using a linear
        model.

        0% cloud cover returns ``offset``.

        100% cloud cover returns 0.

        Parameters
        ----------
        cloud_cover : numeric
            Cloud cover in %.
        offset : numeric, default 0.75
            Determines the maximum transmittance, i.e. the transmittance
            at 0% cloud cover.
        kwargs
            Not used.

        Returns
        -------
        transmittance : numeric
            Atmospheric transmittance between 0 and ``offset``.
        """
        # Bug fix: the offset parameter was previously ignored in favor of a
        # hard-coded 0.75, so the documented contract (0% cloud cover returns
        # offset) only held for the default value.
        transmittance = ((100.0 - cloud_cover) / 100.0) * offset

        return transmittance

    def cloud_cover_to_irradiance_liujordan(self, cloud_cover, **kwargs):
        """
        Estimates irradiance from cloud cover in the following steps:

        1. Determine transmittance using a function of cloud cover e.g.
           :py:meth:`~ForecastModel.cloud_cover_to_transmittance_linear`
        2. Calculate GHI, DNI, DHI using the
           :py:func:`pvlib.irradiance.liujordan` model

        Parameters
        ----------
        cloud_cover : Series
            Cloud cover in %.
        **kwargs
            Passed to the transmittance conversion method.

        Returns
        -------
        irradiance : DataFrame
            Columns include ghi, dni, dhi
        """
        # in principle, get_solarposition could use the forecast
        # pressure, temp, etc., but the cloud cover forecast is not
        # accurate enough to justify using these minor corrections
        solar_position = self.location.get_solarposition(cloud_cover.index)
        dni_extra = extraradiation(cloud_cover.index)
        airmass = self.location.get_airmass(cloud_cover.index)

        transmittance = self.cloud_cover_to_transmittance_linear(cloud_cover,
                                                                 **kwargs)

        irrads = liujordan(solar_position['apparent_zenith'],
                           transmittance, airmass['airmass_absolute'],
                           dni_extra=dni_extra)
        # NaNs (e.g. night-time values) are reported as zero irradiance.
        irrads = irrads.fillna(0)

        return irrads

    def cloud_cover_to_irradiance(self, cloud_cover, how='clearsky_scaling',
                                  **kwargs):
        """
        Convert cloud cover to irradiance. A wrapper method.

        Parameters
        ----------
        cloud_cover : Series
        how : str
            Selects the method for conversion. Can be one of
            clearsky_scaling or liujordan.
        **kwargs
            Passed to the selected method.

        Returns
        -------
        irradiance : DataFrame
            Columns include ghi, dni, dhi

        Raises
        ------
        ValueError
            If ``how`` does not name a supported conversion method.
        """
        # Dispatch table instead of an if/elif chain; lookup is
        # case-insensitive, matching the original behavior.
        converters = {
            'clearsky_scaling': self.cloud_cover_to_irradiance_clearsky_scaling,
            'liujordan': self.cloud_cover_to_irradiance_liujordan,
        }
        key = how.lower()
        if key not in converters:
            raise ValueError('invalid how argument')
        return converters[key](cloud_cover, **kwargs)

    def kelvin_to_celsius(self, temperature):
        """
        Converts Kelvin to celsius.

        Parameters
        ----------
        temperature: numeric
            Temperature in Kelvin.

        Returns
        -------
        temperature: numeric
            Temperature in degrees Celsius.
        """
        # 0 degrees Celsius corresponds to 273.15 K.
        celsius = temperature - 273.15
        return celsius

    def isobaric_to_ambient_temperature(self, data):
        """
        Calculates temperature from isobaric temperature.

        Parameters
        ----------
        data: DataFrame
            Must contain columns pressure, temperature_iso,
            temperature_dew_iso. Input temperature in K,
            pressure in Pa.

        Returns
        -------
        temperature : Series
            Temperature in K
        """
        # Pressure in hPa and dew point in degrees C, the units expected by
        # the Magnus-style saturation vapor pressure formula below.
        pressure_hpa = data['pressure'] / 100.0
        temp_iso = data['temperature_iso']
        dewpoint_c = data['temperature_dew_iso'] - 273.15

        # Saturation water vapor pressure (hPa).
        sat_vapor_pressure = 6.11 * 10**((7.5 * dewpoint_c) /
                                         (dewpoint_c + 273.3))

        # Saturation water vapor mixing ratio (kg water per kg dry air).
        mixing_ratio = 0.622 * (sat_vapor_pressure /
                                (pressure_hpa - sat_vapor_pressure))

        # Correct the isobaric temperature by the latent heat of
        # vaporization divided by the specific heat of dry air.
        return temp_iso - ((2.501 * 10.**6) / 1005.7) * mixing_ratio

    def uv_to_speed(self, data):
        """
        Computes wind speed from wind components.

        Parameters
        ----------
        data : DataFrame
            Must contain the columns 'wind_speed_u' and 'wind_speed_v'.

        Returns
        -------
        wind_speed : Series
            Magnitude of the (u, v) wind vector.
        """
        # Wind speed is the Euclidean norm of the component vector.
        u = data['wind_speed_u']
        v = data['wind_speed_v']
        return np.sqrt(u**2 + v**2)

    def gust_to_speed(self, data, scaling=1/1.4):
        """
        Computes standard wind speed from gust.
        Very approximate and location dependent.

        Parameters
        ----------
        data : DataFrame
            Must contain the column 'wind_speed_gust'.
        scaling : numeric, default 1/1.4
            Gust-to-sustained-speed conversion factor.

        Returns
        -------
        wind_speed : Series
        """
        # Scale the gust down to an approximate sustained wind speed.
        return data['wind_speed_gust'] * scaling
コード例 #52
0
def test_get_airmass_valueerror(times):
    """get_airmass must reject unknown airmass model names with ValueError."""
    location = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    with pytest.raises(ValueError):
        location.get_airmass(times, model='invalid_model')
run_train = False  # Disables training & processing of train set; Set it to True for the first time to create a model

# Exactly one test site (folder name) must be active at a time.
#test_location = "Bondville" #Folder name
#test_location = "Boulder" #Folder name
#test_location = "Desert_Rock" #Folder name
#test_location = "Fort_Peck" #Folder name
#test_location = "Goodwin_Creek" #Folder name
test_location = "Penn_State"  #Folder name
#test_location = "Sioux_Falls" #Folder name

# All_locations -- the active Location must match test_location above.
#bvl = Location(40.1134,-88.3695, 'US/Central', 217.932, 'Bondville')
#bvl = Location(40.0150,-105.2705, 'US/Mountain', 1655.064, 'Boulder')
#bvl = Location(36.621,-116.043, 'US/Pacific', 1010.1072, 'Desert Rock')
#bvl = Location(48,-106.449, 'US/Mountain', 630.0216, 'Fort Peck')
#bvl = Location(34.2487,-89.8925, 'US/Central', 98, 'Goodwin Creek')
bvl = Location(40.798, -77.859, 'US/Eastern', 351.74, 'Penn State')
#bvl = Location(43.544,-96.73, 'US/Central', 448.086, 'Sioux Falls')

test_year = "2017"

# Bug fix: pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
# pandas 0.25; pd.date_range is the supported constructor for this.
# TEST year 2009
#times = pd.date_range(start='2009-01-01', end='2010-01-01', freq='1min', tz=bvl.tz)   # 12 months
#  TEST year 2015
#times = pd.date_range(start='2015-01-01', end='2016-01-01', freq='1min', tz=bvl.tz)   # 12 months
# TEST year 2016
#times = pd.date_range(start='2016-01-01', end='2017-01-01', freq='1min', tz=bvl.tz)   # 12 months
# Test year 2017
times = pd.date_range(start='2017-01-01',
                      end='2018-01-01',
                      freq='1min',
                      tz=bvl.tz)  # 12 months
コード例 #54
0
ファイル: clearsky.py プロジェクト: yaoguaifashu/clearsky
import os
import itertools
import matplotlib.pyplot as plt
import pandas as pd
import pvlib
from pvlib import clearsky, atmosphere, solarposition
from pvlib.location import Location
from pvlib.iotools import read_tmy3

# NOTE(review): the coordinates below (34.69 N, 135.50 E) are in Osaka,
# despite the 'Tokyo' name/timezone -- confirm the intended site.
tus = Location(34.6937378, 135.5021651, 'Asia/Tokyo', 100, 'Tokyo')
# Bug fix: pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
# pandas 0.25; use pd.date_range instead.
times = pd.date_range(start='2019-07-01', end='2019-07-04', freq='1min',
                      tz=tus.tz)
# Plot three days of minute-resolution clear sky irradiance.
cs = tus.get_clearsky(times)
cs.plot()
plt.show()
コード例 #55
0
from numpy.testing import assert_allclose
import pytest

from pvlib._deprecation import pvlibDeprecationWarning
from pvlib.location import Location
from pvlib import solarposition, spa

from conftest import (fail_on_pvlib_version, requires_ephem, needs_pandas_0_17,
                      requires_spa_c, requires_numba)


# setup times and locations to be tested.
# Two days of 15-minute timestamps in June 2014 (naive; localized below).
times = pd.date_range(start=datetime.datetime(2014,6,24),
                      end=datetime.datetime(2014,6,26), freq='15Min')

tus = Location(32.2, -111, 'US/Arizona', 700) # no DST issues possible
times_localized = times.tz_localize(tus.tz)

# shared comparison tolerance for the solar position tests
# NOTE(review): presumably in degrees -- confirm against the assertions using it
tol = 5


@pytest.fixture()
def golden():
    """Location fixture for Golden, Colorado with a DST-observing timezone."""
    return Location(39.742476, -105.1786, 'America/Denver', 1830.14)


@pytest.fixture()
def golden_mst():
    """Same site as ``golden`` but with a fixed-offset (non-DST) MST timezone."""
    return Location(39.742476, -105.1786, 'MST', 1830.14)

コード例 #56
0
	saves[i] = solar_times(locations[i], times)
	print(i)
#still too slow, need to vectorize the equations so that I can get all the solutions at once. 
#tchang
def solar_noon(lat, lon, elev):
    """Return solar noon as decimal hours for one site on one day.

    Relies on module-level globals ``y``, ``m``, ``d`` for the date.
    Slow because Location and the times index are rebuilt on every call.
    """
    current_location = Location(lat, lon, 'America/Denver', elev, 'GYE')
    # Bug fix: datetime.datetime(y, m, d + 1) raises ValueError on the last
    # day of any month; advance by a timedelta instead.
    start = datetime.datetime(y, m, d)
    times = pd.date_range(start=start, end=start + datetime.timedelta(days=1),
                          freq='1Min')
    # NOTE(review): current pvlib's spa_python takes latitude/longitude
    # arguments, not a Location object -- confirm against the pvlib version
    # in use.
    spaout = pvlib.solarposition.spa_python(times, current_location)
    # Solar noon = time of maximum apparent elevation, in decimal hours.
    noon_ts = spaout.apparent_elevation.idxmax()
    return noon_ts.hour + noon_ts.minute / 60

#running a loop on this should take about 3 hours per day evaluated....this is too slow. need a better solution.
#calculations would take 8 years to produce a solution for all days and all cells. 

# Vectorize over the (lats, longs, elevs) grids.
# NOTE(review): lats/longs/elevs are not defined in this file chunk --
# presumably module-level grids; confirm.
vec_solar_noon = np.vectorize(solar_noon)
out = vec_solar_noon(lats, longs, elevs)
for i in range(len(lat)):
	for j in range(len(lon)):
		current_location = Location(lat[i], lon[j], 'America/Denver', elev[i][j], 'GYE')
		locations.append(current_location)
		spaout = pvlib.solarposition.spa_python(times, current_location)
		#get the solar noon values first to check on it
		sn_t = spaout.apparent_elevation.idxmax().hour + spaout.apparent_elevation.idxmax().minute/60
		# NOTE(review): `solar_noon` names the function defined above; unless
		# it is rebound to a 2-D array elsewhere, this item assignment raises
		# TypeError. A separately named result array looks intended -- confirm.
		solar_noon[i][j] = sn_t
		
		


コード例 #57
0
def golden():
    """Return a Location for Golden, Colorado (America/Denver timezone)."""
    return Location(39.742476, -105.1786, 'America/Denver', 1830.14)
コード例 #58
0
ファイル: test_location.py プロジェクト: caskeep/pvlib-python
def test_Location___repr__():
    """Location.__repr__ reports name, coordinates, timezone and altitude."""
    tus = Location(32.2, -111, 'US/Arizona', 700, 'Tucson')
    expected = ('Tucson: latitude=32.2, longitude=-111, '
                'tz=US/Arizona, altitude=700')
    assert repr(tus) == expected