Example #1
    def physicals(self):

        sjulian = jd.datetime_to_jd(self.sdate)
        ejulian = jd.datetime_to_jd(self.edate)

        conn = sqlite3.connect(self.filename)

        querystring = 'SELECT day,weight AS Weight, bodyfat AS Bodyfat FROM Physicals WHERE day<=%s AND day >=%s' % (
            ejulian, sjulian)

        df = pd.read_sql_query(querystring, conn)

        # convert the Julian 'day' column to calendar dates
        df['Date'] = df['day'].apply(au.jdate)
        df = df.drop(columns=['day'])
        df['Date'] = pd.to_datetime(df['Date'])
        df['Weight'] = df['Weight'] * 0.001  # presumably grams to kg
        df['Bodyfat'] = df['Bodyfat'] * 0.1  # presumably tenths of a percent
        df[['Weight', 'Bodyfat']] = df[['Weight',
                                        'Bodyfat']].replace(0.0, np.nan)

        df = df.round({'Weight': 1, 'Bodyfat': 1})

        return df
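The interpolated query above is safe only because ejulian and sjulian are floats; sqlite3 placeholders are the more defensive idiom. A minimal sketch of the same query, reusing conn, sjulian and ejulian from above:

querystring = ('SELECT day, weight AS Weight, bodyfat AS Bodyfat '
               'FROM Physicals WHERE day <= ? AND day >= ?')
df = pd.read_sql_query(querystring, conn, params=(ejulian, sjulian))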
Example #2
def compute_vbarycenter(spectrum):
    """
    Compute the barycentric velocity correction for this date and direction
    """
    global firstRun

    if baryCenterAvailable:
        longitude = spectrum.tellon
        latitude = spectrum.tellat
        altitude = spectrum.telelev
        ra2000 = spectrum.ra
        dec2000 = spectrum.dec

        # need to convert the observation datetime to a Julian date
        jd = jdutil.datetime_to_jd(spectrum.utc)

        # calculate the barycentric correction (debug=True shows
        # various intermediate results)
        corr, hjd = pyasl.helcorr(longitude, latitude, altitude,
                                  ra2000, dec2000, jd, debug=doDebug)
        if doDebug or firstRun:
            print("Barycentric correction [km/s]: %8.3f" % corr)
            firstRun = False
    else:
        corr = 0.
    return corr
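pyasl.helcorr is PyAstronomy's barycentric-correction routine; it returns the correction in km/s together with the heliocentric Julian date, and only the first value is used above. A self-contained call with made-up observatory and target coordinates:

from PyAstronomy import pyasl

# hypothetical site (lon, lat in deg, alt in m) and a made-up J2000 target (deg)
corr, hjd = pyasl.helcorr(6.72, 50.47, 400., 88.79, 7.41, 2457674.5)
print("correction %.3f km/s at HJD %.5f" % (corr, hjd))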
Example #3
    def druns(self):

        sjulian = jd.datetime_to_jd(self.sdate)
        ejulian = jd.datetime_to_jd(self.edate)

        conn = sqlite3.connect(self.filename)

        conn.create_function('jdate', 1, au.jdate)
        conn.create_aggregate('avehr', 2, au.aveHR)

        sqlite3.enable_callback_tracebacks(True)

        querystring = 'SELECT runs.id AS RunID,jdate(day) AS Date,SUM(dist) AS Distance,\
                                        SUM(t) AS Time,\
                                        AVEHR(hr,t) AS Heartrate\
                                        FROM run_splits\
                                        INNER JOIN runs\
                                        ON runs.id=run_splits.run_id\
                                        WHERE day<=%s AND day>=%s\
                                        GROUP BY run_splits.run_id\
                                        ORDER BY day ASC' % (ejulian, sjulian)

        df = pd.read_sql_query(querystring, conn)

        df[['Time', 'Distance',
            'Heartrate']] = df[['Time', 'Distance',
                                'Heartrate']].replace(0, np.nan)
        df['Date'] = pd.to_datetime(df['Date'])
        df['Distance'] = df['Distance'] * 0.001  # presumably metres to km
        df['Pace'] = df.apply(lambda row: au.pace(row.Time, row.Distance),
                              axis=1)
        df['TxHR'] = df['Time'] * df['Heartrate']
        df['TxHR'] = df['TxHR'].replace(0.0, np.nan)

        df = df.round({'Distance': 1, 'Time': 0, 'Heartrate': 0})

        return df
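conn.create_function and conn.create_aggregate register plain Python callables so SQL can call them (jdate(day) and AVEHR(hr, t) above). A minimal, self-contained illustration of the aggregate protocol, with a hypothetical time-weighted mean standing in for au.aveHR:

import sqlite3

class WeightedMean:
    # sqlite3 aggregates need step() and finalize()
    def __init__(self):
        self.num, self.den = 0.0, 0.0

    def step(self, value, weight):
        if value is not None and weight:
            self.num += value * weight
            self.den += weight

    def finalize(self):
        return self.num / self.den if self.den else None

conn = sqlite3.connect(':memory:')
conn.create_aggregate('avehr', 2, WeightedMean)
conn.execute('CREATE TABLE splits (hr REAL, t REAL)')
conn.executemany('INSERT INTO splits VALUES (?, ?)', [(150, 60), (140, 120)])
print(conn.execute('SELECT avehr(hr, t) FROM splits').fetchone()[0])  # 143.33...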
Example #4
    def runs(self):

        sjulian = jd.datetime_to_jd(self.sdate)
        ejulian = jd.datetime_to_jd(self.edate)

        conn = sqlite3.connect(self.filename)

        conn.create_function('jdate', 1, au.jdate)
        conn.create_aggregate('avehr', 2, au.aveHR)

        sqlite3.enable_callback_tracebacks(True)

        querystring = 'SELECT runs.id AS RunID,runs.starttime AS StartTime,\
                        locations.name AS Location,\
                        run_types.name AS RunType,surfaces.name AS Surface,\
                        shoes.name AS Shoe,shoes.brand AS Brand\
                        FROM runs\
                        INNER JOIN locations\
                            ON locations.id=runs.location_id\
                        INNER JOIN run_types\
                            ON run_types.id=runs.runtype_id\
                        INNER JOIN surfaces\
                            ON surfaces.id=runs.surface_id\
                        INNER JOIN shoes\
                            ON shoes.id=runs.shoe_id\
                        WHERE day<=%s AND day>=%s\
                        ORDER BY day ASC' % (ejulian, sjulian)

        df = pd.read_sql_query(querystring, conn)

        druns = self.druns()

        runs = pd.merge_ordered(druns, df, on='RunID')
        runs = runs.drop(columns=['TxHR'])
        runs = runs.set_index('RunID')

        return runs
Example #5
def getHour(uHora, grado):
    # advance minute by minute (up to 24 hours) until the ascendant
    # reaches the requested degree (within +/- 1 degree)
    for hora in range(24):
        encontrado = False
        for minuto in range(60):
            uHora += datetime.timedelta(minutes=1)
            jd1 = jdutil.datetime_to_jd(uHora)
            # Placidus house cusps for this instant; h[0][0] is the
            # first house cusp, i.e. the ascendant
            h = swe.houses(jd1, float(lat), float(lon),
                           bytes("P", encoding="utf-8"))
            gasc = float(h[0][0])
            if grado - 1.00 <= gasc <= grado + 1.00:
                uHora = uHora + datetime.timedelta(hours=int(gmt))
                encontrado = True
                break
        if encontrado:
            break

    return uHora
Example #6
def getHour(uHora, grado):
    # advance minute by minute (up to 24 hours) until the Sun reaches
    # the requested degree; returns a one-element list [(time, found)]
    for hora in range(24):
        encontrado = False
        tuplaRet = [(uHora, encontrado)]
        for minuto in range(60):
            uHora += datetime.timedelta(minutes=1)
            jd1 = jdutil.datetime_to_jd(uHora)
            # body 0 is the Sun; pos[0][0] is its ecliptic longitude
            pos = swe.calc_ut(jd1, 0, flag=swe.FLG_SWIEPH + swe.FLG_SPEED)
            gsol = float(pos[0][0])
            if gsol >= grado:
                uHora = uHora + datetime.timedelta(hours=float(gmt))
                encontrado = True
                tuplaRet = [(uHora, encontrado)]
                break
        if encontrado:
            break

    return tuplaRet
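Both getHour variants above depend on module-level lat, lon and gmt and on an initialised Swiss Ephemeris, none of which is shown in the snippets. A minimal, hypothetical setup (all values made up) for the ascendant variant from Example #5:

import datetime
import swisseph as swe
import jdutil

swe.set_ephe_path('/usr/share/libswe/ephe')  # same path Example #7 uses
lat, lon, gmt = '40.42', '-3.70', -1         # hypothetical observer and GMT offset
start = datetime.datetime(2000, 1, 1, 0, 0)
print(getHour(start, 123.0))  # local time at which the ascendant reaches ~123 deg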
Example #7
horaLocal = horaLocal - datetime.timedelta(hours=int(gmt))
#horaLocal = horaLocal - datetime.timedelta(minutes=int(30))
#print(horaLocal)

# After applying the GMT offset
dia = horaLocal.strftime('%d')
mes = int(horaLocal.strftime('%m'))
anio = horaLocal.strftime('%Y')
hora = horaLocal.strftime('%H')
minuto = horaLocal.strftime('%M')

#print('Time: ' + dia + '/' + str(mes) + '/' + anio + ' ' + hora + ':' + minuto)

swe.set_ephe_path('/usr/share/libswe/ephe')

d = datetime.datetime(int(anio), int(mes), int(dia), int(hora), int(minuto))
jd1 = jdutil.datetime_to_jd(d)

np = [('Sol', 0), ('Luna', 1), ('Mercurio', 2), ('Venus', 3), ('Marte', 4),
      ('Júpiter', 5), ('Saturno', 6), ('Urano', 7), ('Neptuno', 8),
      ('Plutón', 9), ('Nodo Norte', 11), ('Nodo Sur', 10), ('Quirón', 15),
      ('Selena', 57), ('Lilith', 12)]

# Obliquity is computed with ipl = SE_ECL_NUT = -1 in SWE; in swisseph, ECL_NUT = -1
posObli = swe.calc(jd1, -1, flag=swe.FLG_SWIEPH + swe.FLG_SPEED)
oblicuidad = posObli[0][0]
#pos=swe.calc_ut(jd1, np[4][1], flag=swe.FLG_SWIEPH+swe.FLG_SPEED)
#pos=swe.calc_ut(jd1, np[4][1], flag=swe.FLG_SWIEPH)
pos = swe.calc_ut(jd1, np[2][1], flag=swe.FLG_SPEED)
print(pos)
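One subtlety worth flagging: in the Swiss Ephemeris, swe.calc expects terrestrial (ephemeris) time while swe.calc_ut expects universal time, so passing the same jd1 to both, as above, mixes the two scales. A hedged sketch of the TT call (swe.deltat converts UT to TT, in days):

jd_tt = jd1 + swe.deltat(jd1)
posObli = swe.calc(jd_tt, swe.ECL_NUT, flag=swe.FLG_SWIEPH + swe.FLG_SPEED)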
Example #8
def convert_fitacf_data(date, in_fname, radar_info):
    day = in_fname.split('.')[0].split('/')[-1]  # e.g. '20140105' from the fitACF path
    month = day[:-2]                             # e.g. '201401'
    
    # Keep track of fitACF files that have multiple beam definitions in a
    # monthly log file
    multiBeamLogDir = date.strftime(helper.FIT_NET_LOG_DIR) + month
    multiBeamLogfile = '{dir}/multi_beam_defs_{m}.log'.format(dir = multiBeamLogDir, m = month)

    # Store conversion info like returns outside FOV, missing slist, etc 
    # for each conversion
    conversionLogDir = '{dir}/{d}'.format(dir = multiBeamLogDir, d = day)
    fName = in_fname.split('/')[-1]
    conversionLogfile = '{dir}/{fit}_to_nc.log'.format(dir = conversionLogDir, fit = fName)

    # Define the name of the file holding the list of rawACFs used to 
    # create the fitACF
    rawacfListFilename = '.'.join(in_fname.split('.')[:-1]) + '.rawacfList.txt'

    SDarn_read = pydarn.SuperDARNRead(in_fname)
    data = SDarn_read.read_fitacf()
    bmdata = {
        'rsep': [],
        'frang': [],
    }
    for rec in data:
        for k, v in bmdata.items():
            bmdata[k].append(rec[k])
        if 'slist' in rec.keys():
            if radar_info['maxrg'] < rec['slist'].max():
                radar_info['maxrg'] = rec['slist'].max() + 5
    
    for k, v in bmdata.items():
        val = np.unique(v)
        if len(val) > 1:        
            os.makedirs(conversionLogDir, exist_ok=True)
            os.makedirs(multiBeamLogDir, exist_ok=True)
            
            # Log the multiple beams error in the monthly multi beam def log
            logText = '{fitacfFullFile} has {numBeamDefs} beam definitions - skipping file conversion.\n'.format(fitacfFullFile = in_fname, numBeamDefs = len(val))
            
            with open(multiBeamLogfile, "a+") as fp: 
                fp.write(logText)

            # Log the multiple beams error in this fitACF's conversion log
            with open(conversionLogfile, "a+") as fp: 
                fp.write(logText)

            return MULTIPLE_BEAM_DEFS_ERROR_CODE, MULTIPLE_BEAM_DEFS_ERROR_CODE
        
        bmdata[k] = int(val)

    # Define FOV
    fov = radFov.fov(
        frang=bmdata['frang'], rsep=bmdata['rsep'], site=None, nbeams=int(radar_info['maxbeams']),
        ngates=int(radar_info['maxrg']), bmsep=radar_info['beamsep'], recrise=radar_info['risetime'], siteLat=radar_info['glat'],
        siteLon=radar_info['glon'], siteBore=radar_info['boresight'], siteAlt=radar_info['alt'], siteYear=date.year,
        elevation=None, altitude=300., hop=None, model='IS',
        coords='geo', date_time=date, coord_alt=0., fov_dir='front',
    )

    # Define fields 
    short_flds = 'tfreq', 'noise.sky', 'cp',
    fov_flds = 'mjd', 'beam', 'range', 'lat', 'lon', 
    data_flds = 'p_l', 'v', 'v_e', 'gflg', 
    elv_flds = 'elv', 'elv_low', 'elv_high',

    # Figure out if we have elevation information
    elv_exists = True
    for rec in data:
        if 'elv' not in rec.keys():
            elv_exists = False
    if elv_exists:
        data_flds += elv_flds

    # Set up data storage
    out = {}
    for fld in (fov_flds + data_flds + short_flds):
        out[fld] = []
   
    # Run through each beam record and store 
    for rec in data:
        time = dt.datetime(rec['time.yr'], rec['time.mo'], rec['time.dy'], rec['time.hr'], rec['time.mt'], rec['time.sc'])
        # slist is the list of range gates with backscatter
        if 'slist' not in rec.keys():
            os.makedirs(conversionLogDir, exist_ok=True)
            logText = 'Could not find slist in record {recordTime} - skipping\n'.format(recordTime = time.strftime('%Y-%m-%d %H:%M:%S'))
            with open(conversionLogfile, "a+") as fp: 
                fp.write(logText)

            continue

        # Can't deal with returns outside of FOV
        if rec['slist'].max() >= fov.slantRCenter.shape[1]:
            os.makedirs(conversionLogDir, exist_ok=True)

            # Log returns outside of FOV
            logText = 'Record {recordTime} found to have a max slist of {maxSList} - skipping record\n'.format(recordTime = time.strftime('%Y-%m-%d %H:%M:%S'), maxSList = rec['slist'].max())
            with open(conversionLogfile, "a+") as fp: 
                fp.write(logText)

            continue

        one_obj = np.ones(len(rec['slist'])) 
        mjd = jdutil.jd_to_mjd(jdutil.datetime_to_jd(time))
        bmnum = one_obj * rec['bmnum']
        fovi = fov.beams == rec['bmnum']
        out['mjd'] += (one_obj * mjd).tolist()
        out['beam'] += bmnum.tolist()
        out['range'] += fov.slantRCenter[fovi, rec['slist']].tolist()
        out['lat'] += fov.latCenter[fovi, rec['slist']].tolist()
        out['lon'] += fov.lonCenter[fovi, rec['slist']].tolist()

        for fld in data_flds:
            out[fld] += rec[fld].tolist()
        for fld in short_flds:  # expand out to size
            out[fld] += (one_obj * rec[fld]).tolist()

    # Convert to numpy arrays 
    for k, v in out.items():
        out[k] = np.array(v)

    # Calculate beam azimuths assuming 15 degrees elevation
    beam_off = radar_info['beamsep'] * (fov.beams - (radar_info['maxbeams'] - 1) / 2.0)
    el = 15.
    brng = np.zeros(beam_off.shape)
    for ind, beam_off_elzero in enumerate(beam_off):
        brng[ind] = radFov.calcAzOffBore(el, beam_off_elzero, fov_dir=fov.fov_dir) + radar_info['boresight']

    # Pull the fit version out of the fitACF filename
    fit_version = '.'.join(in_fname.split('.')[-3:-1])

    # Load the list of rawacf files used to create the fitacf and netcdf    
    with open(rawacfListFilename, "rb") as fp:
        rawacf_source_files = pickle.load(fp)

    # Once the list of rawacf source files has been loaded, delete the file used to
    # temporarily store that information
    os.remove(rawacfListFilename)
    
    hdr = {
        'lat': radar_info['glat'],
        'lon': radar_info['glon'],
        'alt': radar_info['alt'],
        'rsep': bmdata['rsep'],
        'maxrg': radar_info['maxrg'],
        'bmsep': radar_info['beamsep'],
        'boresight': radar_info['boresight'],
        'beams': fov.beams,
        'brng_at_15deg_el': brng,
        'fitacf_version': fit_version,
        'rawacf_source': rawacf_source_files
    }
    return out, hdr
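jdutil.jd_to_mjd above is just the constant offset between the two scales (MJD = JD - 2400000.5). A quick sanity check, assuming the same jdutil module is importable:

import datetime as dt
import jdutil

t = dt.datetime(2000, 1, 1, 12)    # the J2000.0 epoch
jd = jdutil.datetime_to_jd(t)      # 2451545.0
print(jd, jdutil.jd_to_mjd(jd))    # 2451545.0 51544.5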
Example #9
File: ueb.py Project: Castronova/EMIT
    def __init__(self, config_params):
        super(ueb, self).__init__(config_params)

        # build inputs and outputs
        io = mdl.build_exchange_items_from_config(config_params)

        # set input and output exchange items
        self.inputs(value=io[stdlib.ExchangeItemType.INPUT])
        self.outputs(value=io[stdlib.ExchangeItemType.OUTPUT])

        # grab the C library path and the control file path
        lib = config_params['lib']
        conFile = config_params['control']

        # load the UEB C library
        self.__uebLib = cdll.LoadLibrary(join(os.path.dirname(__file__), lib))

        # save the current directory for saving output data
        self.curdir = os.path.dirname(os.path.abspath(conFile))

        # the base directory for the control file is used to convert relative paths into absolute paths
        self.base_dir = os.path.dirname(conFile)

        # get param, sitevar, input, output, and watershed files
        with open(conFile, 'r') as f:
            # lines = f.readlines()
            lines = f.read().splitlines()  # this will auto strip the \n \r
            C_paramFile = os.path.join(self.base_dir, lines[1])
            C_sitevarFile = os.path.join(self.base_dir, lines[2])
            C_inputconFile = os.path.join(self.base_dir, lines[3])
            C_outputconFile = os.path.join(self.base_dir, lines[4])
            C_watershedFile = os.path.join(self.base_dir, lines[5])
            C_wsvarName = lines[6].split(' ')[0]
            C_wsycorName = lines[6].split(' ')[1]
            C_wsxcorName = lines[6].split(' ')[2]
            C_aggoutputconFile = os.path.join(self.base_dir, lines[7])
            C_aggoutputFile = os.path.join(self.base_dir, lines[8])
            ModelStartDate = [int(float(l)) for l in lines[9].split(' ') if l != '']
            ModelEndDate = [int(float(l)) for l in lines[10].split(' ') if l != '']
            ModelDt = float(lines[11])
            ModelUTCOffset = float(lines[12])
            inpDailyorSubdaily = bool(lines[13] == True)  # note: lines[13] is a string, so this is always False
            self.outtStride, outyStep, outxStep = [int(s) for s in lines[14].split(' ')]


        C_wsxcorArray = c_float()
        C_wsycorArray = c_float()
        self.C_wsArray = pointer(pointer(c_int32()))
        self.C_dimlen1 = c_int()
        self.C_dimlen2 = c_int()
        totalgrid = 0
        self.C_wsfillVal = c_int(-9999)
        npar = c_int(32)
        tinitTime = c_int(0)
        self.C_parvalArray = pointer(c_float(0))
        numOut = 70 # hack: number of outputs?

        self.C_pOut = pointer(pointOutput())
        C_aggOut = pointer(aggOutput())
        self.C_ncOut = pointer(ncOutput())
        self.C_npout = c_int(0)
        self.C_nncout = c_int(0)
        C_naggout = c_int(0)
        C_nZones = c_int(0)

        C_tNameout = c_char_p("time")
        tunits = (c_char*256)()
        C_tUnitsout = pointer(tunits)
        C_tlong_name = c_char_p("time")
        C_tcalendar = c_char_p("standard")
        self.t_out = pointer(c_float(0))
        C_out_fillVal = c_float(-9999.0)

        self.C_outDimord = c_int(0)
        C_aggoutDimord = c_int(1)
        self.outvarindx = c_int(17)
        # aggoutvarindx = c_int(17)
        # size = c_int()
        # rank = c_int()
        # irank = c_int()
        # jrank = c_int()
        # startTimeT = c_double(0.0)
        # TotalTime = c_double(0.0)
        # totalmodelrunTime = c_double(0.0)
        # TsReadTime = c_double(0.0)
        # TSStartTime = c_double()
        # ComputeStartTime = c_double()
        # ComputeTime = c_double(0.0)
        # OutWriteTime = c_double()

        self.uebVars = (c_char_p * 70)(
            "Year", "Month", "Day", "dHour", "atff", "HRI", "Eacl", "Ema",
            "conZen", "Ta", "P", "V", "RH", "Qsi", "Qli", "Qnet", "Us", "SWE",
            "tausn", "Pr", "Ps", "Alb", "QHs", "QEs", "Es", "SWIT", "QMs", "Q",
            "FM", "Tave", "TSURFs", "cump", "cumes", "cumMr", "Qnet", "smelt",
            "refDepth", "totalRefDepth", "cf", "Taufb", "Taufd", "Qsib",
            "Qsid", "Taub", "Taud", "Qsns", "Qsnc", "Qlns", "Qlnc", "Vz",
            "Rkinsc", "Rkinc", "Inmax", "intc", "ieff", "Ur", "Wc", "Tc",
            "Tac", "QHc", "QEc", "Ec", "Qpc", "Qmc", "Mc", "FMc", "SWIGM",
            "SWISM", "SWIR", "errMB")


        C_zName = c_char_p("Outletlocations")

        C_tcorvar = pointer((c_float * 13)())
        self.C_tsvarArray = pointer((c_float * 13)())
        C_tsvarArrayTemp = pointer((c_float * 5)())



        #todo: [#] == pointer, * == pointer

        self.C_tsvarArray = pointer((POINTER(c_float)*13)())

        C_ntimesteps = pointer((c_int * 5)())

        # create pointer to instance of sitevar struct array
        self.C_strsvArray = pointer((sitevar * 32)())

        # create pointer to instance of inpforcvar struct array
        self.C_strinpforcArray = pointer((inpforcvar * 13)())

        # mask = c_float()
        # pcap_lookupnet(dev, ctypes.byref(net), ctypes.byref(mask), errbuf)

        # read watershed netcdf file
        self.__uebLib.readwsncFile(C_watershedFile, C_wsvarName, C_wsycorName, C_wsxcorName, byref(C_wsycorArray), byref(C_wsxcorArray), byref(self.C_wsArray), byref(self.C_dimlen1), byref(self.C_dimlen2), byref(self.C_wsfillVal))


        wsArray1D = numpy.empty((self.C_dimlen1.value*self.C_dimlen2.value),dtype=numpy.float)
        for i in xrange(self.C_dimlen1.value) :
            for j in xrange(self.C_dimlen2.value):
                wsArray1D[i*self.C_dimlen2.value + j] = self.C_wsArray[i][j]

        # zvalues is the unique set of wsArray1D
        zValues = list(set(wsArray1D))
        # fillset = [wsfillVal]

        # zVal is the set of zValues that do not equal wsFillVal
        # (compare against .value; the raw c_int object would never match)
        zVal = [z for z in zValues if z != self.C_wsfillVal.value]

        C_nZones = len(zVal)
        z_ycor = [0.0 for i in xrange(C_nZones)]
        z_xcor = [0.0 for i in xrange(C_nZones)]


        # read params (#194)
        self.__uebLib.readParams(C_paramFile, byref(self.C_parvalArray), npar)

        # read site variables (#200)
        self.__uebLib.readSiteVars(C_sitevarFile, byref(self.C_strsvArray))


        # read 2d NetCDF Data
        for i in range(0, 32):
            a = self.C_strsvArray.contents[i]
            if a.svType == 1:
                # print "%d %s %s\n" % (i, a.svFile,a.svVarName)
                retvalue = self.__uebLib.read2DNC(os.path.join(self.base_dir, a.svFile), a.svVarName, byref(a.svArrayValues))


        #//read input /forcing control file--all possible entries of input control have to be provided
        #readInputForcVars(inputconFile, strinpforcArray);

        # read input force variables (main.cpp, line 219)
        self.__uebLib.readInputForcVars(cast(C_inputconFile,c_char_p), self.C_strinpforcArray)


        # elog.info('UEB Start Date: %s' % sd.strftime("%m-%d-%Y %H:%M:%S"))
        # elog.info('UEB End Date: %s' % ed.strftime("%m-%d-%Y %H:%M:%S"))

        # calculate model time span as a julian date (main.cpp, line 220)
        modelSpan =  jdutil.datetime_to_jd(datetime.datetime(*ModelEndDate)) - \
                     jdutil.datetime_to_jd(datetime.datetime(*ModelStartDate))

        # setup the model start dates as UEB expects them
        ModelStartHour = ModelStartDate.pop(-1) # an integer representing the start hour (24 hour time)
        ModelEndHour = ModelEndDate.pop(-1)     # an integer representing the end hour (24 hour time)
        # ModelStartDate is a 3 element array: [year, month, day]
        # ModelEndDate is a 3 element array: [year, month, day]

        # convert Simulation Time parameters into ctypes
        self.C_ModelStartDate = (c_int * len(ModelStartDate))(*ModelStartDate)
        self.C_ModelEndDate =(c_int * len(ModelEndDate))(*ModelEndDate)
        self.C_ModelDt = c_double(ModelDt)
        self.C_ModelUTCOffset = c_double(ModelUTCOffset)
        self.C_ModelStartHour = c_double(ModelStartHour)
        self.C_ModelEndHour = c_double(ModelEndHour)


        # calculate model time steps (main.cpp, line 222)
        self.numTimeStep = int(math.ceil(modelSpan*(24./ModelDt)) ) + 1

        # initialize C_tsvarArray values (this replaces __uebLib.readTextData)
        self.initialize_timeseries_variable_array(self.C_strinpforcArray, self.numTimeStep)

        # NOTE: C_strinpforcArray stores info about the forcing data files

        # # read forcing data (main.cpp, line 226)
        # if self.C_strsvArray.contents[16].svType != 3: # no accumulation zone (fixme: ???)
        #     for it in xrange(13):
        #         inftype = self.C_strinpforcArray.contents[it].infType
        #         print 'infFile: ',self.C_strinpforcArray.contents[it].infFile
        #         if inftype == 0:
        #
        #             # read the files stored in C_strinpforcArray and populated C_tsvarArray
        #             self.__uebLib.readTextData(os.path.join(self.base_dir, self.C_strinpforcArray.contents[it].infFile), byref(self.C_tsvarArray.contents[it]), byref(C_ntimesteps[0]))
        #
        #         elif inftype == 2 or inftype == -1:
        #             self.C_tsvarArray.contents[it] = (c_float * 2)()
        #             C_ntimesteps.contents[0] = 2
        #             # copy the default value if a single value is the option
        #             self.C_tsvarArray.contents[it][0] = self.C_strinpforcArray.contents[it].infType
        #             self.C_tsvarArray.contents[it][1] = self.C_strinpforcArray.contents[it].infdefValue


        # :: this array is initialized to (numOut+1, numTimeStep+1) rather than (numOut, numTimeStep)
        # :: b/c otherwise the calculations from RunUEB are incorrect for the first row.
        # :: e.g.
        # ::     2009 2010 5   30.000        23.999979
        # ::  rather than:
        # ::     2009 10 1    0.000         0.569902
        # ::
        # :: I thought this was b/c numpy.float32 (and 64) are smaller than c_float, however this change
        # :: below didn't fix the problem.
        # ::
        # create a numpy array for outputs
        self.outvarArray = numpy.zeros(shape=(numOut+1, self.numTimeStep), dtype=numpy.float, order="C")
        # arrays_old = self.outvarArray.astype(numpy.float32)
        arrays = self.outvarArray.astype(c_float)
        rows, cols = self.outvarArray.shape
        arrays_as_list = list(arrays)
        #get ctypes handles
        ctypes_arrays = [numpy.ctypeslib.as_ctypes(array) for array in arrays_as_list]
        #Pack into pointer array
        self.C_outvarArray = (POINTER(c_float) * rows)(*ctypes_arrays)

        # x = numpy.zeros(shape=(numOut, self.numTimeStep), dtype=numpy.float, order="C")
        # _floatpp = numpy.ctypeslib.ndpointer(dtype=numpy.uintp, ndim=1, flags='C')
        # xpp = (x.__array_interface__['data'][0] + numpy.arange(x.shape[0])*x.strides[0]).astype(numpy.uintp)
        # self.C_outvarArray = pointer(((POINTER(c_float) * self.numTimeStep) * numOut)())


        # a = (c_float * self.numTimeStep)()
        # outvarArray = pointer((a * numOut)())
        # for i in xrange(numOut):
        #     outvarArray[i] = pointer((c_float * self.numTimeStep)())

        # total grid size to compute progess
        totalgrid = self.C_dimlen1.value*self.C_dimlen2.value

        # read output control file (main.cpp, line 251)
        # readOutputControl(outputconFile, aggoutputconFile, pOut, ncOut, aggOut, npout, nncout, naggout);

        self.__uebLib.readOutputControl(cast(C_outputconFile,c_char_p), cast(C_aggoutputconFile, c_char_p),
                                        byref(self.C_pOut), byref(self.C_ncOut), byref(C_aggOut),
                                        byref(self.C_npout), byref(self.C_nncout), byref(C_naggout))


        # create output netcdf
        self.C_outtSteps = self.numTimeStep / self.outtStride
        self.t_out = numpy.empty(shape=(self.C_outtSteps), dtype=numpy.float, order="C")
        for i in xrange(self.C_outtSteps):
            self.t_out[i] = i*self.outtStride*ModelDt

        # initialize the output arrays
        aggoutvarArray = numpy.zeros((C_nZones,C_naggout.value, self.C_outtSteps), dtype=numpy.float)
        totalAgg = numpy.empty((self.C_outtSteps,), dtype=numpy.float)
        ZonesArr = numpy.zeros((C_nZones,), dtype=numpy.int32)

        # main.cpp, line 290
        # CREATE 3D NC OUTPUT FILES
        # convert self.t_out into a float pointer
        C_t_out = self.t_out.ctypes.data_as(POINTER(c_float))
        for i in xrange(self.C_nncout.value):
            '''
            for (int icout = 0; icout < nncout; icout++)
                retvalue = create3DNC_uebOutputs(ncOut[icout].outfName, (const char*)ncOut[icout].symbol, (const char*)ncOut[icout].units, tNameout, tUnitsout,
            tlong_name, tcalendar, outtSteps, outDimord, self.t_out, &out_fillVal, watershedFile, wsvarName, wsycorName, wsxcorName);
            '''

            retvalue = self.__uebLib.create3DNC_uebOutputs(self.C_ncOut[i].outfName, cast(self.C_ncOut[i].symbol, c_char_p), cast(self.C_ncOut[i].units, c_char_p), C_tNameout, C_tUnitsout, C_tlong_name, C_tcalendar, self.C_outtSteps, self.C_outDimord, C_t_out, byref(C_out_fillVal), C_watershedFile, C_wsvarName, C_wsycorName, C_wsxcorName)

        # CREATE 3D NC AGGREGATE OUTPUT FILE
        # convert z_ycor and x_xcor from list into ctype
        C_z_xcor = numpy.asarray(z_xcor).ctypes.data_as(POINTER(c_float))
        C_z_ycor = numpy.asarray(z_ycor).ctypes.data_as(POINTER(c_float))
        retvalue = self.__uebLib.create3DNC_uebAggregatedOutputs(C_aggoutputFile, C_aggOut, C_naggout, C_tNameout, C_tUnitsout, C_tlong_name, C_tcalendar, self.C_outtSteps, C_aggoutDimord, C_t_out, byref(C_out_fillVal), C_watershedFile, C_wsvarName, C_wsycorName, C_wsxcorName, C_nZones, C_zName, C_z_ycor, C_z_xcor)


        # todo: create output element set
        # print 'Output Calculations available at: '
        # for pid in xrange(self.C_npout.value):
        #     print "  Point(",self.C_pOut[pid].xcoord,", ",self.C_pOut[pid].ycoord,') '



        # todo: This is where UEB grid points are defined!, expose these as input/output spatial objects
        # main.cpp, line 303
        self.activeCells = []
        # print 'Calculations will be performed at: '
        for iy in xrange(self.C_dimlen1.value):
            for jx in xrange(self.C_dimlen2.value):
                if self.C_wsArray[iy][jx] != self.C_wsfillVal.value and self.C_strsvArray.contents[16].svType != 3:
                    # print "  Point(",jx,", ",iy,') '
                    self.activeCells.append((iy, jx))

        # build output exchange items
        xcoords = []
        ycoords = []
        for pid in xrange(self.C_npout.value):
            xcoords.append(self.C_pOut[pid].xcoord)
            ycoords.append(self.C_pOut[pid].ycoord)
        self.pts = geometry.build_point_geometries(xcoords, ycoords)
        self.__swe = self.outputs()['Snow Water Equivalent']
        self.__swit = self.outputs()['Surface Water Input Total']
        self.__swe.addGeometries2(self.pts)
        self.__swit.addGeometries2(self.pts)



        # build input exchange items
        ds = nc.Dataset(C_watershedFile)
        Xlist = ds.variables['x']
        Ylist = ds.variables['y']
        self.geoms = self.build_geometries(Xlist, Ylist)
        self.inputs()['Precipitation'].addGeometries2(self.geoms)
        self.inputs()['Temperature'].addGeometries2(self.geoms)

        # set start, end, and timestep parameters
        ts = datetime.timedelta(hours=ModelDt)
        self.time_step(ts.total_seconds())
        sd = datetime.datetime(*ModelStartDate)
        ed = datetime.datetime(*ModelEndDate)
        self.simulation_start(sd)
        self.simulation_end(ed)
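To make the time-step arithmetic concrete: modelSpan is a difference of Julian dates, so it is measured in days, and numTimeStep divides it by the step length in days plus one fencepost. A worked example with a hypothetical run from 2009-10-01 to 2010-06-01 at a 6-hour ModelDt:

import math
import datetime
import jdutil

span = jdutil.datetime_to_jd(datetime.datetime(2010, 6, 1)) \
     - jdutil.datetime_to_jd(datetime.datetime(2009, 10, 1))   # 243.0 days
numTimeStep = int(math.ceil(span * (24. / 6.))) + 1            # 973 steps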
Example #11
import jdutil
import datetime
import sys

#print('Args: ', sys.argv)

dia = sys.argv[1]
mes = sys.argv[2]
anio = sys.argv[3]
hora = sys.argv[4]
minuto = sys.argv[5]

#print('Date: '+dia+'/'+mes+'/'+anio+' Time: '+hora+':'+minuto)

d = datetime.datetime(int(anio), int(mes), int(dia), int(hora), int(minuto))
print(jdutil.datetime_to_jd(d))
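Run it as python <script>.py day month year hour minute; for example, the arguments 1 1 2000 12 0 describe 2000-01-01 12:00 and should print 2451545.0, the Julian date of the J2000.0 epoch.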
Example #12
def main(srcpath, outfile):
    '''
    Find all the files in a path and extract the julian date of any pictures found
    '''
    srcpath = '/'.join(srcpath.split('\\'))
    candidate_files = []

    print "Scanning directory: ", srcpath

    for (dirpath, dirnames, filenames) in walk(srcpath):
        for file in filenames:
            fullpath = os.path.join(dirpath, file)
            candidate_files.append('/'.join(fullpath.split('\\')))
        break  # only ingest the first directory found. TODO: make a track per directory

    refs = []

    print "Reading EXIF data"

    file_count = 0
    max_files = 1000

    for path in candidate_files:
        with open(path, "rb") as fh:
            tags = exifread.process_file(fh, details=False)

        if 'EXIF DateTimeOriginal' in tags:
            rpath = os.path.relpath(path, srcpath)
            ref = ImageRef(path.split('/')[-1], rpath)

            datestr = tags['EXIF DateTimeOriginal'].values
            datestrs = datestr.split(' ')
            cal = datestrs[0].split(':')
            hr = datestrs[1].split(':')
            dto = datetime.datetime(int(cal[0]), int(cal[1]), int(cal[2]),
                                    int(hr[0]), int(hr[1]), int(hr[2]))
            julian = jdutil.datetime_to_jd(dto)

            ref.time_stamp = julian

            if 'EXIF ExposureTime' in tags:
                et = tags['EXIF ExposureTime'].values[0]
                ref.exposure_time = float(et.num) / float(et.den)
            else:
                ref.exposure_time = 1.0 / 100.0  # arbitrary

            refs.append(ref)

        file_count += 1
        if file_count > max_files:
            break

    refs.sort()

    epoch = refs[0].time_stamp

    for ref in refs:
        print (ref.time_stamp - epoch) * 24 * 3600, ref.path  # seconds since the first photo

    timeline = otio.schema.Timeline()
    timeline.name = "Photos"  # TODO pass the time line name
    track = otio.schema.Sequence()
    track.name = "Photo track"  # TODO make a track per day
    track.metadata = {"epoch": epoch}
    timeline.tracks.append(track)

    for i, ref in enumerate(refs):
        next_i = min(i + 1, len(refs) - 1)
        ts = (ref.time_stamp - epoch) * 24.0 * 3600.0  # seconds
        ts_next = (refs[next_i].time_stamp - epoch) * 24.0 * 3600.0
        duration = ts_next - ts

        # exposure time is already in seconds
        image_time = opentime.TimeRange(
            opentime.RationalTime(ts, 1),
            opentime.RationalTime(ref.exposure_time, 1.0))

        media_reference = otio.media_reference.External(
            target_url="file://" + ref.path, available_range=image_time)
        media_reference.name = ref.name

        clip = otio.schema.Clip(name=ref.name)
        clip.media_reference = media_reference
        clip.source_range = opentime.TimeRange(
            opentime.RationalTime(0, 1.0),
            opentime.RationalTime(duration, 1.0))
        track.append(clip)

    otio.adapters.write_to_file(timeline, outfile)
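Since the time stamps are Julian dates, their differences are in days and the 24 * 3600 factors convert them to seconds; for instance, two photos taken 30 s apart differ by 30 / 86400, roughly 3.5e-4 JD, which maps back to 30 s on the track.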