Example no. 1
def get_two_best_sondes(date_str, **kwargs):
	sonde_file=kwargs.get('sonde_file', '/data/twpice/darwin.txt')
	#outdir=kwargs.get('outdir', '/flurry/home/scollis/bom_mds/dealias/')
	outdir=kwargs.get('outdir', '/home/scollis/bom_mds/dealias/')
	tim_date=num2date(datestr2num(date_str))
	sonde_list=read_sounding_within_a_day(sonde_file, tim_date)
	launch_dates=[sonde['date_list'][0] for sonde in sonde_list]
	#print launch_dates
	launch_date_offset=[date2num(sonde['date_list'][0])- date2num(tim_date)  for sonde in sonde_list]
	sonde_made_it=False
	candidate=0
	while not(sonde_made_it):
		best_sonde=sonde_list[argsort(abs(array(launch_date_offset)))[candidate]]
		candidate=candidate+1
		sonde_made_it=best_sonde['alt(m)'][-1] > 18000.
		if not sonde_made_it: print "Sonde Burst at ", best_sonde['alt(m)'][-1], "m rejecting"
	print "Sonde Burst at ", best_sonde['alt(m)'][-1], "m Accepting"
	sonde_made_it=False
	while not(sonde_made_it):
		sec_best_sonde=sonde_list[argsort(abs(array(launch_date_offset)))[candidate]]
		candidate=candidate+1
		sonde_made_it=sec_best_sonde['alt(m)'][-1] > 18000.
		if not sonde_made_it: print "Sonde Burst at ", sec_best_sonde['alt(m)'][-1], "m rejecting"
	print "Sonde Burst at ", sec_best_sonde['alt(m)'][-1], "m Accepting"
	print 'Time of radar: ', tim_date, ' Time of  best sonde_launch: ', best_sonde['date_list'][0], ' Time of sonde_termination: ', best_sonde['date_list'][-1]
	print 'Time of radar: ', tim_date, ' Time of second sonde_launch: ', sec_best_sonde['date_list'][0], ' Time of sonde_termination: ', best_sonde['date_list'][-1]
	for i in range(len(sonde_list)):
		best_sonde=sonde_list[argsort(abs(array(launch_date_offset)))[i]]
		print 'Time of radar: ', tim_date, ' Time of  best sonde_launch: ', best_sonde['date_list'][0], ' Offset', abs(date2num(best_sonde['date_list'][0])-date2num(tim_date))*24.0
	return best_sonde, sec_best_sonde
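For reference, a self-contained sketch of the selection rule above: pick the sounding whose launch time is closest to the radar time, skipping any sonde that burst below 18 km. The sonde dictionaries here are hypothetical stand-ins for whatever read_sounding_within_a_day() returns.

# Sketch of the "closest launch in time, but only if the sonde reached 18 km"
# selection.  The sonde dicts are made-up stand-ins for the real sounding data.
from datetime import datetime, timedelta
import numpy as np
from matplotlib.dates import date2num

radar_time = datetime(2006, 1, 20, 12, 0)
sondes = [
    {'date_list': [radar_time + timedelta(hours=h)], 'alt(m)': np.array([top])}
    for h, top in [(-5, 21000.0), (-1, 12000.0), (2, 19500.0)]
]

offsets = np.array([date2num(s['date_list'][0]) - date2num(radar_time)
                    for s in sondes])
for idx in np.argsort(np.abs(offsets)):
    if sondes[idx]['alt(m)'][-1] > 18000.0:   # reject sondes that burst too low
        print('picked sonde launched at', sondes[idx]['date_list'][0])
        break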
Example no. 2
    def generate_filename(self, fld='chl', fldtype="DAY", **kwargs):
        """Generate filename"""
        if len(kwargs):
            self._timeparams(**kwargs)
        ydmax = (pl.date2num(dtm(self.yr, 12, 31)) -
                 pl.date2num(dtm(self.yr,  1,  1))) + 1
        if fldtype == "MC":
            self.add_mnclim()
            datestr = self.mc_datedict[self.mn]
        if "mo" in fldtype.lower():
            self.add_filepreflist(fldtype="mo")
            datestr = self.mo_fileprefs[self.yr*100 + self.mn]
        elif fldtype == "DAY":
            datestr = "%i%03i" % (self.yr, self.yd)
        elif fldtype == "8D":
            yd1 = np.arange(1,365,8)
            yd2 = np.arange(8,370,8)
            yd2[-1] = ydmax
            pos = np.nonzero(self.yd >= yd1)[0].max()
            datestr = ("%i%03i%i%03i" % 
                       (self.yr, yd1[pos], self.yr, yd2[pos]))
        elif fldtype == "CU":
            self.a_cu_url_9km = 'MODISA/Mapped/Cumulative/4km/chlor/'
            datestr = max(self._retrieve_datestamps(self.a_cu_url_9km))
        else:
            raise TypeError, "File average type not included"
        return("%s%s.L3m_%s_%s_%s%s.nc" % (self.fp, datestr, fldtype,
										self.vc[fld][0], self.res[0],
                                        self.vc[fld][1]))
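A minimal sketch of the "8D" branch in isolation: given an assumed year and year-day, it builds the start/end year-days of the matching 8-day composite and formats them as in the snippet.

# Map a (year, year-day) pair to the start/end year-days of its 8-day composite.
import numpy as np
from matplotlib.dates import date2num
from datetime import datetime as dtm

yr, yd = 2010, 37                      # assumed example inputs
ydmax = int(date2num(dtm(yr, 12, 31)) - date2num(dtm(yr, 1, 1))) + 1
yd1 = np.arange(1, 365, 8)             # first year-day of each 8-day period
yd2 = np.arange(8, 370, 8)             # last year-day of each 8-day period
yd2[-1] = ydmax
pos = np.nonzero(yd >= yd1)[0].max()
print("%i%03i%i%03i" % (yr, yd1[pos], yr, yd2[pos]))   # -> 20100332010040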
Example no. 3
def main():
    print("start program")

    countTaxis()

    # a figure (chart) where we add the bars and change the axis properties
    fig = figure()
    ax = fig.add_subplot(111)

    # set the width of the bar to interval-size
    barWidth = date2num(intervalDate + intervalDelta) - date2num(intervalDate)
    # add a bar with specified values and width
    ax.bar(date2num(barList.keys()), barList.values(), width=barWidth)

    # set the x-Axis to show the hours
    ax.xaxis.set_major_locator(HourLocator())
    ax.xaxis.set_major_formatter(DateFormatter("%H:%M"))
    ax.xaxis.set_minor_locator(MinuteLocator())
    ax.grid(True)
    xlabel('Zeit (s)')
    ylabel('Quantit' + u'\u00E4' + 't')
    title('Menge der Taxis im VLS-Gebiet')
    ax.autoscale_view()

    # shows the text of the x-axis in a way that it looks nice
    fig.autofmt_xdate()

    # display the chart
    show()
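A reduced version of the bar-width trick above, with synthetic data: the bar width is simply the interval length expressed in matplotlib date units (days), i.e. the difference of two date2num values.

# Hourly-style bar chart where bar positions and widths are date numbers.
from datetime import datetime, timedelta
import matplotlib.pyplot as plt
from matplotlib.dates import date2num, HourLocator, DateFormatter

interval = timedelta(minutes=30)
start = datetime(2020, 1, 1, 8, 0)
counts = {start + i * interval: (i % 5) + 1 for i in range(12)}   # synthetic counts

fig, ax = plt.subplots()
width = date2num(start + interval) - date2num(start)   # interval length in days
ax.bar(date2num(list(counts.keys())), list(counts.values()), width=width)
ax.xaxis.set_major_locator(HourLocator())
ax.xaxis.set_major_formatter(DateFormatter('%H:%M'))
fig.autofmt_xdate()
plt.show()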
Example no. 4
    def movie(self):
        import matplotlib as mpl
        mpl.rcParams['axes.labelcolor'] = 'white'
        pl.close(1)
        pl.figure(1, (8, 4.5), facecolor='k')
        miv = np.ma.masked_invalid
        figpref.current()
        jd0 = pl.date2num(dtm(2005, 1, 1))
        jd1 = pl.date2num(dtm(2005, 12, 31))
        mp = projmaps.Projmap('glob')
        x, y = mp(self.llon, self.llat)
        for t in np.arange(jd0, jd1):
            print pl.num2date(t)
            self.load(t)

            pl.clf()
            pl.subplot(111, axisbg='k')
            mp.pcolormesh(x,
                          y,
                          miv(np.sqrt(self.u**2 + self.v**2)),
                          cmap=cm.gist_heat)
            pl.clim(0, 1.5)
            mp.nice()
            pl.title(
                '%04i-%02i-%02i' % (pl.num2date(t).year, pl.num2date(t).month,
                                    pl.num2date(t).day),
                color='w')
            pl.savefig('/Users/bror/oscar/norm/%03i.png' % t,
                       bbox_inches='tight',
                       facecolor='k',
                       dpi=150)
Example no. 5
 def _l3read_hdf(self, fieldname='l3m_data'):
     self.vprint( "Reading hdf file")
     sd = SD(self.filename, SDC.READ)
     ds = sd.select(fieldname)
     field     = ds[self.j1:self.j2, self.i1:self.i2].copy()
     intercept = ds.attributes()['Intercept']
     slope     = ds.attributes()['Slope']
     try:
         nanval = ds.attributes()['Fill']
     except:
         nanval = 65535
     try:
         base  = ds.attributes()['Base']
     except KeyError:
         base = -999
     if 'Scaled Data Maximum' in sd.attributes().keys():
         self.maxval = sd.attributes()['Scaled Data Maximum']
         self.minval = sd.attributes()['Scaled Data Minimum']
     elif 'Suggested Image Scaling Maximum' in sd.attributes().keys():
         self.maxval = sd.attributes()['Suggested Image Scaling Maximum']
         self.minval = sd.attributes()['Suggested Image Scaling Minimum']
     else:
         self.minval = self.vc[fld][2]
         self.maxval = self.vc[fld][3]
     start_iso = (pl.date2num(dtm(
                     sd.attributes()['Period Start Year'],1,1)) + 
                     sd.attributes()['Period Start Day'] - 1)
     end_iso   = (pl.date2num(dtm(
                     sd.attributes()['Period End Year'],1,1)) + 
                     sd.attributes()['Period End Day'] - 1)
     self.jd   = ((start_iso+end_iso)/2)
     self.date = pl.num2date(self.jd)
     return field,base,intercept,slope
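The period-midpoint computation above, sketched with the HDF attributes replaced by literal values (the attribute names are taken from the snippet; the dates are made up).

# Mid-point of a composite period expressed as a matplotlib date number.
from datetime import datetime as dtm
from matplotlib.dates import date2num, num2date

attrs = {'Period Start Year': 2008, 'Period Start Day': 9,
         'Period End Year': 2008, 'Period End Day': 16}
start = date2num(dtm(attrs['Period Start Year'], 1, 1)) + attrs['Period Start Day'] - 1
end   = date2num(dtm(attrs['Period End Year'],   1, 1)) + attrs['Period End Day']   - 1
jd = (start + end) / 2
print(num2date(jd))    # midpoint of the composite period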
Example no. 6
 def plot_coverage(self, siteid):
     """
     Makes a bar plot (Gantt-like) of the time coverage of datasets at a site
     """
     st = io.BytesIO()
     with db.session_scope() as session:
         import matplotlib
         matplotlib.use('Agg', warn=False)
         import pylab as plt
         import numpy as np
         ds = session.query(db.Dataset).filter_by(_site=int(siteid)).order_by(
             db.Dataset._source, db.Dataset._valuetype, db.Dataset.start).all()
         left = plt.date2num([d.start for d in ds])
         right = plt.date2num([d.end for d in ds])
         btm = np.arange(-.5, -len(ds), -1)
         # return 'left=' + str(left) + ' right=' + str(right) + ' btm=' + str(btm)
         fig = plt.figure()
         ax = fig.gca()
         ax.barh(left=left, width=right - left, bottom=btm,
                 height=0.9, fc='0.75', ec='0.5')
         for l, b, d in zip(left, btm, ds):
             ax.text(l, b + .5, '#%i' % d.id, color='k', va='center')
         ax.xaxis_date()
         ax.set_yticks(btm + .5)
         ax.set_yticklabels(
             [d.source.name + '/' + d.valuetype.name for d in ds])
         ax.set_position([0.3, 0.05, 0.7, 0.9])
         ax.set_title('Site #' + siteid)
         ax.set_ylim(-len(ds) - .5, .5)
         ax.grid()
         fig.savefig(st, dpi=100)
     return st.getvalue()
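A Gantt-style coverage plot similar to the one above, with the database Dataset rows replaced by plain (name, start, end) tuples and the current Axes.barh signature.

# Horizontal coverage bars positioned with date2num'ed start/end times.
from datetime import datetime
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.dates import date2num

datasets = [('rain/site1', datetime(2019, 1, 1), datetime(2019, 6, 1)),
            ('temp/site1', datetime(2019, 3, 1), datetime(2020, 1, 1))]
left = date2num([d[1] for d in datasets])
right = date2num([d[2] for d in datasets])
btm = np.arange(-0.5, -len(datasets), -1)

fig, ax = plt.subplots()
ax.barh(btm, width=right - left, left=left, height=0.9, fc='0.75', ec='0.5')
ax.xaxis_date()                      # interpret the x values as dates
ax.set_yticks(btm)
ax.set_yticklabels([d[0] for d in datasets])
plt.show()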
Example no. 7
 def generate_filename(self, fld='chl', fldtype="DAY", **kwargs):
     """Generate filename"""
     if len(kwargs):
         self._timeparams(**kwargs)
     ydmax = (pl.date2num(dtm(self.yr, 12, 31)) -
              pl.date2num(dtm(self.yr, 1, 1))) + 1
     if fldtype == "MC":
         self.add_mnclim()
         datestr = self.mc_datedict[self.mn]
     if "mo" in fldtype.lower():
         self.add_filepreflist(fldtype="mo")
         datestr = self.mo_fileprefs[self.yr * 100 + self.mn]
     elif fldtype == "DAY":
         datestr = "%i%03i" % (self.yr, self.yd)
     elif fldtype == "8D":
         yd1 = np.arange(1, 365, 8)
         yd2 = np.arange(8, 370, 8)
         yd2[-1] = ydmax
         pos = np.nonzero(self.yd >= yd1)[0].max()
         datestr = ("%i%03i%i%03i" % (self.yr, yd1[pos], self.yr, yd2[pos]))
     elif fldtype == "CU":
         self.a_cu_url_9km = 'MODISA/Mapped/Cumulative/4km/chlor/'
         datestr = max(self._retrieve_datestamps(self.a_cu_url_9km))
     else:
         raise TypeError, "File average type not included"
     return ("%s%s.L3m_%s_%s_%s%s.nc" %
             (self.fp, datestr, fldtype, self.vc[fld][0], self.res[0],
              self.vc[fld][1]))
Example no. 8
def plotaverages(data):
    months = list(data.apply(set)[7])  #we need a list of all the unique months
    months1 = []
    for row in months:
        months1.append(datetime.strptime(
            row,
            "%Y-%m"))  #create a list with unique months as datetime values
    medians = []
    for i in months:  #for each date in months to iterate loop below
        dd = data[
            data['Month'] ==
            i]  #change the dataset to only include those rows with unique months
        price = []
        for row in dd[
                'Price1']:  #for each row that corresponds with that unique month
            price.append(float(row))  #add it to a list
        medians.append(py.median(
            price))  #then add the value to the medians list, then iterate
    x = months1  #plot
    y = medians
    x, y = (
        list(i) for i in zip(*sorted(zip(x, y)))
    )  #to numerically order both arrays in date order, relating the individual values from each array
    plt.plot(pl.date2num(x), y)
    plt.xlim(733400, 735400)
    plt.gca().xaxis.set_major_formatter(mdates.DateFormatter(
        '%m/%Y'))  #reformat x axis to show date in correct format
    plt.xlabel('Date')
    plt.ylabel('Price in Pounds')
    plt.title('Monthly Median House Prices')
    p = py.polyfit(pl.date2num(x), y, 1)  #fit a straight line with order 1
    plt.plot(
        pl.date2num(x), p[0] * pl.date2num(x) + p[1], 'r-'
    )  #plot x against the coefficients of the line, p[0]x + p[1] == mx + c
    plt.show()
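A compact sketch of the trend-line fit above: datetimes are converted to float days with date2num, a degree-1 polynomial is fitted with polyfit, and both the series and the fit are drawn on a date-formatted axis. The monthly medians here are invented.

# Linear trend over dated values via polyfit on date2num floats.
from datetime import datetime
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib.dates import date2num

months = [datetime(2013, m, 1) for m in range(1, 13)]
medians = np.array([150 + 2 * m + (m % 3) for m in range(12)], dtype=float)

x = date2num(months)
p = np.polyfit(x, medians, 1)          # slope (per day) and intercept
plt.plot(x, medians, 'bo-')
plt.plot(x, np.polyval(p, x), 'r-')
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%m/%Y'))
plt.gcf().autofmt_xdate()
plt.show()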
Example no. 9
 def movie(self):
     import matplotlib as mpl
     mpl.rcParams['axes.labelcolor'] = 'white'
     pl.close(1)
     pl.figure(1,(8,4.5),facecolor='k')
     miv = np.ma.masked_invalid
     figpref.current()
     jd0 = pl.date2num(dtm(2005,1,1))
     jd1 = pl.date2num(dtm(2005,12,31))
     mp = projmaps.Projmap('glob')
     x,y = mp(self.llon,self.llat)
     for t in np.arange(jd0,jd1):
         print pl.num2date(t)
         self.load(t)
     
         pl.clf()
         pl.subplot(111,axisbg='k')
         mp.pcolormesh(x,y,
                       miv(np.sqrt(self.u**2 +self.v**2)),
                       cmap=cm.gist_heat)
         pl.clim(0,1.5)
         mp.nice()
         pl.title('%04i-%02i-%02i' % (pl.num2date(t).year,
                                      pl.num2date(t).month,
                                      pl.num2date(t).day),
                  color='w')
         pl.savefig('/Users/bror/oscar/norm/%03i.png' % t,
                    bbox_inches='tight',facecolor='k',dpi=150)
Example no. 10
def plot_1d_comparison(data_dict,style_dict,time_dict,start_date,finish_date,file_path,axis_label,interval=3):
    """ 
    """
    start_time = date2num(datetime.strptime(start_date, "%Y-%m-%d %H:%M:%S"))
    finish_time = date2num(datetime.strptime(finish_date, "%Y-%m-%d %H:%M:%S"))
        
    # plot 1d graph...
    fig = figure(figsize=(15,8),dpi=90)
    ax = fig.add_axes([.05,.12,.9,.85])
    max_value = 0.0
    for key, data_arr in data_dict.iteritems():
        ax.plot(time_dict[key],data_arr,style_dict[key], label=key)
        data_arr = vtktools.arr(data_arr)
        if data_arr.max() > max_value:
            max_value = data_arr.max()
    max_value += max_value * 0.1
    
    dateFmt = mpl.dates.DateFormatter('%m/%Y')
    ax.xaxis.set_major_formatter(dateFmt)
    monthsLoc = mpl.dates.MonthLocator(interval=interval)
    ax.xaxis.set_major_locator(monthsLoc)
    labels = ax.get_xticklabels()
    for label in labels:
        label.set_rotation(30) 
    ax.set_ylim(max_value, 0)
    ax.set_xlim(start_time,finish_time)
    xlabel('Date (mm/yyyy)')
    ylabel(axis_label)
    legend(loc=0)
    
    form = file_path.split('.')[-1].strip()
    savefig(file_path, dpi=90,format=form)
    close(fig)
Example no. 11
 def _timeparams(self, **kwargs):
     """Calculate time parameters from given values"""
     for key in kwargs.keys():
         self.__dict__[key] = kwargs[key]
     if "date" in kwargs:
         self.jd = pl.datestr2num(kwargs['date'])
         self.jd = int(self.jd) if self.jd == int(self.jd) else self.jd
     elif ('yd' in kwargs) & ('yr' in kwargs):
         if self.yd < 1:
             self.yr = self.yr -1
             ydmax = (pl.date2num(dtm(self.yr, 12, 31)) -
                      pl.date2num(dtm(self.yr,  1,  1))) + 1    
             self.yd = ydmax + self.yd     
         self.jd = self.yd + pl.date2num(dtm(self.yr,1,1)) - 1
     elif  ('yr' in kwargs) & ('mn' in kwargs) & ('dy' in kwargs):
         self.jd = pl.date2num(dtm(self.yr,self.mn,self.dy))
     elif not 'jd' in kwargs:
         if hasattr(self, 'defaultjd'):
             self.jd = self.defaultjd
         else:
             raise KeyError, "Time parameter missing"
     if hasattr(self,'hourlist'):
         dd = self.jd-int(self.jd)
         ddlist = np.array(self.hourlist).astype(float)/24
         ddpos = np.argmin(np.abs(ddlist-dd))
         self.jd = int(self.jd) + ddlist[ddpos]
     self._jd_to_dtm()
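The year-day arithmetic used in _timeparams, as a stand-alone helper: a (year, year-day) pair maps to a matplotlib date number via January 1 of that year, and non-positive year-days wrap into the previous year.

# Year-day to matplotlib date-number conversion, with wrap-around for yd < 1.
from datetime import datetime as dtm
from matplotlib.dates import date2num, num2date

def yd_to_jd(yr, yd):
    if yd < 1:                                    # e.g. yd=0 -> Dec 31 of yr-1
        yr -= 1
        ydmax = (date2num(dtm(yr, 12, 31)) - date2num(dtm(yr, 1, 1))) + 1
        yd = ydmax + yd
    return yd + date2num(dtm(yr, 1, 1)) - 1

print(num2date(yd_to_jd(2012, 60)))   # -> 2012-02-29 00:00:00+00:00
print(num2date(yd_to_jd(2012, 0)))    # -> 2011-12-31 00:00:00+00:00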
Example no. 12
    def setTicks(self):
        """ set x and y axes major and minor tick locators, formatters and labels """

        # define tick locators
        self.xMajor = LinearLocator(numticks = 16)
        self.xMinor = LinearLocator(numticks = 76)
        self.yMajor = LinearLocator(numticks = 5)
        self.yMinor = LinearLocator(numticks = 17)

        self.starttime = datetime.datetime.today()
        self.starttime_tick = time.mktime(self.starttime.timetuple())
        self.currenttime = self.starttime + datetime.timedelta(seconds = 3)
        self.currenttime_tick = time.mktime(self.currenttime.timetuple())
        self.lines = self.axes.plot([self.starttime], [0], 'r-')

        # set tick locators
        self.axes.xaxis.set_major_locator(self.xMajor)
        self.axes.xaxis.set_minor_locator(self.xMinor)
        self.axes.yaxis.set_major_locator(self.yMajor)
        self.axes.yaxis.set_minor_locator(self.yMinor)

        self.axes.set_xlim((date2num(self.starttime),date2num(self.currenttime)))
        # create x-axis tick labels (seconds)
        self.axes.xaxis.set_ticklabels(self.createXTickLabels(self.currenttime_tick), rotation = 30, ha = "right", size = 'smaller', name = 'Calibri')
        self.axes.set_ylim(self.ymin,self.ymax)
        # create y-axis tick labels (mV)
        self.axes.yaxis.set_ticklabels(self.createYTickLabels(self.ymin), size = 'smaller', name = 'Calibri')

        # set the properties of the minor axes
        self.axes.grid(color = 'lightgrey', linewidth = 0.05, linestyle = ':', which = 'minor')
        # set the properties of the major axes
        self.axes.grid(color = 'slategrey', linewidth = 0.5, linestyle = '-', which = 'major')
Example no. 13
	def setXYlim(self, data, para):
		for label in self.axes.get_xaxis().get_ticklabels():
			label.set_fontsize(9)

		if data[1][-1] > 0: 	# positive
			if data[1][-1] > para["open"]*0.75:
				self.axes.axhline(y = para["open"], linestyle = "--", linewidth = 0.5, color = "gray")
			if data[1][-1] > para["stop"]*0.85:
				self.axes.axhline(y = para["stop"], linestyle = "--", linewidth = 0.5, color = "red")
			if data[1][-1] < para["close"]*1.15:
				self.axes.axhline(y = para["close"], linestyle = "--", linewidth = 0.5, color = "green")
		else: 					# negative
			if data[1][-1] < -para["open"]*0.75:
				self.axes.axhline(y = -para["open"], linestyle = "--", linewidth = 0.5, color = "gray")
			if data[1][-1] < -para["stop"]*0.85:
				self.axes.axhline(y = -para["stop"], linestyle = "--", linewidth = 0.5, color = "red")
			if data[1][-1] > -para["close"]*1.15:
				self.axes.axhline(y = -para["close"], linestyle = "--", linewidth = 0.5, color = "green")

		thisDate = copy.copy(data[0][-1])
		if data[0][-1].time() <= datetime.time(11,30,0):
			self.axes.axis(xmin=pylab.date2num(thisDate.replace(hour=9,minute=30,second=0)), xmax=pylab.date2num(thisDate.replace(hour=11,minute=30)))
		else:
			self.axes.axis(xmin=pylab.date2num(thisDate.replace(hour=13,minute=0,second=0)), xmax=pylab.date2num(thisDate.replace(hour=15,minute=0)))
		pass
Example no. 14
    def plotSceleton(self,
                     axes,
                     xlabel='Power',
                     ylabel='Period',
                     scaling='log',
                     min_h=0.,
                     max_h=1000.):
        cw = self._cw

        scales = cw.getscales()
        pwr = self.getSceleton(cw.getpower())
        y = cw.fourierwl * scales
        axes.imshow(pwr[0],
                    cmap=plb.cm.hot_r,
                    extent=[
                        plb.date2num(self._x[0]),
                        plb.date2num(self._x[-1]), y[-1], y[0]
                    ],
                    aspect='auto',
                    interpolation=None)
        axes.xaxis_date()
        axes.imshow(pwr[1],
                    cmap=plb.cm.hot_r,
                    extent=[
                        plb.date2num(self._x[0]),
                        plb.date2num(self._x[-1]), y[-1], y[0]
                    ],
                    aspect='auto',
                    interpolation=None)
        axes.xaxis_date()
        if scaling == "log":
            axes.set_yscale('log')
        axes.set_ylim(y[0], y[-1])
Example no. 15
    def initPlot(self):
        """ redraw the canvas to set the initial x and y axes when plotting starts """

        self.starttime = datetime.datetime.today()
        self.currenttime = self.starttime + datetime.timedelta(seconds=3)
        self.endtime = self.starttime + datetime.timedelta(seconds=15)
        self.timeaxis = num2date(drange(self.starttime, self.endtime, datetime.timedelta(milliseconds=10)))

        self.xvalues.append(self.timeaxis[0])
        self.yvalues.append(self.parentPanel.myECG.ecg_leadII[0])

        # for counter purposes only
        self.ybuffer = self.yvalues

        self.lines[0].set_data(self.xvalues, self.yvalues)

        self.axes.set_xlim((date2num(self.starttime), date2num(self.currenttime)))
        self.axes.xaxis.set_ticklabels(
            self.createXTickLabels(self.currenttime), rotation=30, ha="right", size="smaller", name="Calibri"
        )

        self.samples_counter += 1
        self.ysamples_counter += 1

        self.buff_counter = 1
Example no. 16
    def setTicks(self):
        """ set x and y axes major and minor tick locators, formatters and labels """

        # set tick locators
        self.xMajor = LinearLocator(numticks=16)
        self.xMinor = LinearLocator(numticks=76)
        self.yMajor = LinearLocator(numticks=13)
        self.yMinor = LinearLocator(numticks=61)

        self.starttime = datetime.datetime.today()
        self.currenttime = self.starttime + datetime.timedelta(seconds=3)

        self.lines = self.axes.plot([self.starttime], [0], "r-")

        self.axes.xaxis.set_major_locator(self.xMajor)
        self.axes.xaxis.set_minor_locator(self.xMinor)
        self.axes.yaxis.set_major_locator(self.yMajor)
        self.axes.yaxis.set_minor_locator(self.yMinor)

        self.axes.set_xlim((date2num(self.starttime), date2num(self.currenttime)))
        self.axes.xaxis.set_ticklabels(
            self.createXTickLabels(self.currenttime), rotation=30, ha="right", size="smaller", name="Calibri"
        )
        self.axes.set_ylim(self.ymin, self.ymax)
        self.axes.yaxis.set_ticklabels(self.createYTickLabels(self.ymin), size="smaller", name="Calibri")

        self.axes.grid(color="lightgrey", linewidth=0.05, linestyle=":", which="minor")
        self.axes.grid(color="slategrey", linewidth=0.5, linestyle="-", which="major")
Example no. 17
def plot(dates,time,mode,temp):

    global nfigure, datemin, datemax

    v, a, b, c = invers(temp, time-datemin)
    amp,dphi=np.sqrt(a**2+b**2),np.arctan2(b,a)
    if dphi < 0:
        dphi = dphi + 2*np.pi
    phase[i,j], amplitude[i,j] = dphi,amp
    print('Mean T: {:0.3f} °'.format(np.mean(temp)))
    print('Vitesse: {:0.3f} °/yr'.format(v))
    print('Amplitude: {:0.3f} °'.format(np.sqrt(a**2+b**2)))
    print('Phase Shift: {:0.3f} rad'.format(np.arctan2(b,a)))
    fit = seasonal(time-datemin,v,a,b,c)

    # plot data
    fig = plt.figure(nfigure,figsize=(12,5))
    ax = fig.add_subplot(1,1,1)
    # convert idates to num
    x = date2num(dates)
    xmin, xmax = datetime.datetime(np.int(datemin), 1, 1), datetime.datetime(datemax, 1, 1)
    xlim=date2num(np.array([xmin,xmax]))
    # format the ticks
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%Y/%m/%d"))
    ax.plot(x,temp, '-bo',ms=.5,lw=.1,color='blue',label='ERAI point {}, {}'.format(lat,lon),rasterized=True)
    ax.plot(x,fit,'-r',lw=2.0,label='MAAT: {0:.2f}, v: {1:.2f} deg/yrs, dphi:{2:.2f} rad, Amp:{3:.2f} deg'.format(np.mean(temp),v,dphi,amp))
    ax.plot(xlim,[0,0],'--',c='black')
    fig.autofmt_xdate()
    ax.set_xlim(xlim)
    ax.set_ylim([-30,30])
    plt.xlabel('Time (Year/month/day)')
    plt.ylabel('Temperature (deg)')
    plt.legend(loc='best')
    fig.savefig('temperatures_{}_{}.pdf'.format(lat,lon), format='PDF')
    nfigure += 1

    # plot mod
    fig2 = plt.figure(nfigure,figsize=(7,5))
    ax2 = fig2.add_subplot(1,1,1)
    ax2.set_xlim([0,1])
    ax2.plot(mode,temp, '-bo',ms=.5,lw=.1,color='blue',label='ERAI point {}, {}'.format(lat,lon),rasterized=True)
    x = np.arange(0,1,0.01)
    fit = seasonal(x,v,a,b,c)
    ax2.plot(x,fit,'-r',lw=2.0,label='MAAT: {0:.2f}'.format(np.mean(temp)))
    ax2.plot([0,1],[0,0],'--',c='black')

    idx = np.argwhere(np.diff(np.sign(fit))).flatten()
    for k in range(len(idx)):
        ax2.text(x[idx[k]],-25,'{:0.3f}'.format(x[idx[k]]))
        ax2.text(x[idx[k]],-28,(datetime.datetime(np.int(datemin), 1, 1) + datetime.timedelta(x[idx[k]]*365.1 - 1)).strftime('%Y/%m/%d'))
        ax2.plot([x[idx[k]],x[idx[k]]],[-30,30],'--',c='black')

    ax2.set_xticks(np.arange(0, 1, 1./12))
    ax2.xaxis.set_major_formatter(ticker.FormatStrFormatter('%0.1f'))
    plt.xlabel('Time')
    plt.ylabel('Temperature (deg)')
    plt.legend(loc='best')
    ax2.set_ylim([-30,30])
    fig2.savefig('Modtemperatures_{}_{}.pdf'.format(lat,lon), format='PDF')
Example no. 18
def save_data_cube(radar1, radar2, ncf_fname):
    ncf = NetCDFFile(ncf_fname, "w")
    ncf.createDimension("nx", len(radar1["xar"]))
    ncf.createDimension("ny", len(radar1["yar"]))
    ncf.createDimension("nl", len(radar1["levs"]))
    ncf.createDimension("one", 1)
    ncf.createDimension("two", 2)
    avail_vars_radar1 = set(radar1.keys()) & set(
        ["VE", "VR", "CZ", "RH", "PH", "ZD", "SW", "KD", "i_comp", "j_comp", "k_comp", "u_array", "v_array", "w_array"]
    )
    avail_vars_radar2 = set(radar2.keys()) & set(
        ["VE", "VR", "CZ", "RH", "PH", "ZD", "SW", "KD", "i_comp", "j_comp", "k_comp"]
    )
    # testme=make_var(ncf, var, 'f',  ('nx', 'ny', 'nl'), gp[var])
    ncf_varlist_radar1 = [
        make_var(ncf, var + "_radar1", "f", ("nx", "ny", "nl"), radar1[var]) for var in avail_vars_radar1
    ]
    ncf_varlist_radar2 = [
        make_var(ncf, var + "_radar2", "f", ("nx", "ny", "nl"), radar2[var]) for var in avail_vars_radar2
    ]
    xvar = ncf.createVariable("xar", "f", ("one", "nx"))
    for i in range(len(radar1["xar"])):
        xvar[0, i] = float(radar1["xar"][i])
    yvar = ncf.createVariable("yar", "f", ("one", "ny"))
    for j in range(len(radar1["yar"])):
        yvar[0, j] = float(radar1["yar"][j])
    # yvar.assignValue(array([gp['yar']]))
    lvar = ncf.createVariable("levs", "f", ("one", "nl"))
    for k in range(len(radar1["levs"])):
        lvar[0, k] = float(radar1["levs"][k])
    # lvar.assignValue(array([gp['levs']]))
    rad1_locvar = ncf.createVariable("radar1_loc", "f", ("one", "two"))
    rad2_locvar = ncf.createVariable("radar2_loc", "f", ("one", "two"))
    rad1_disvar = ncf.createVariable("radar1_dis", "f", ("one", "two"))
    rad2_disvar = ncf.createVariable("radar2_dis", "f", ("one", "two"))
    rad1_datvar = ncf.createVariable("radar1_date", "d", ("one", "one"))
    rad2_datvar = ncf.createVariable("radar2_date", "d", ("one", "one"))
    rad1_locvar[0, 0] = float(radar1["radar_loc"][0])
    rad1_locvar[0, 1] = float(radar1["radar_loc"][1])
    rad2_locvar[0, 0] = float(radar2["radar_loc"][0])
    rad2_locvar[0, 1] = float(radar2["radar_loc"][1])
    rad1_disvar[0, 0] = float(radar1["displacement"][0])
    rad1_disvar[0, 1] = float(radar1["displacement"][1])
    rad2_disvar[0, 0] = float(radar2["displacement"][0])
    rad2_disvar[0, 1] = float(radar2["displacement"][1])
    rad1_datvar[0, 0] = float(date2num(radar1["date"]))
    rad2_datvar[0, 0] = float(date2num(radar2["date"]))
    setattr(ncf, "radar1_name", radar1["radar_name"])
    setattr(ncf, "radar2_name", radar2["radar_name"])

    # ncf_varlist_gp=dict([(var+'_gp',ncf.createVariable(var+'_gp', 'f', ('nx', 'ny', 'nl'))) for var in avail_vars_gp])
    # for var in avail_vars_gp:
    # 	for i in range(ncf_varlist_gp[var+'_gp'].shape[0]):
    # 		for j in range(ncf_varlist_gp[var+'_gp'].shape[1]):
    # 			for k in range(ncf_varlist_gp[var+'_gp'].shape[2]):
    # 				ncf_varlist_gp[var+'_gp'][i,j,k]=gp[var][i,j,k]
    ncf.close()
Example no. 19
 def addSlider(self, valmax):
     """ put a slider widget to navigate through the whole ECG plot """
     ### FIX ME: Make all time objects as parameters??? (for flexibility)
     ### Maybe the self.endtime? Because the starting point is always constant
     ### added valmax as the endtime parameter
     self.axtime = self.figure.add_axes([0.125, 0.1, 0.775, 0.03])
     self.time_scroller = matplotlib.widgets.Slider(
         self.axtime, "", date2num(self.starttime), date2num(valmax), valinit=date2num(self.starttime)
     )
     self.time_scroller.on_changed(self.updateWindow)
Example no. 20
def Draw01(D2A, StockName, Duration):
    #>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # show as a picture
    # x axis, time
    Llength = len(D2A) - 1
    CXL02 = []
    for Ll in range(1, Llength):
        CXL02.append(datetime.datetime.strptime(D2A[Ll][0], '%Y-%m-%d'))
    CXL02.reverse()
    #>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # y axis, adj close value
    CXL03 = []
    for Ll in range(1, Llength):
        CXL03.append(float(D2A[Ll][2]))
    CXL03.reverse()
    Label03 = D2A[0][2]
    # Buy point
    CXL06 = []
    CXL07 = []
    for Ll in range(1, Llength):
        if D2A[Ll][12] == "BBB":
            CXL06.append(datetime.datetime.strptime(D2A[Ll][0], '%Y-%m-%d'))
            CXL07.append(float(D2A[Ll][2]))
    CXL06.reverse()
    CXL07.reverse()
    #---------------------------------------
    # Sell point
    CXL08 = []
    CXL09 = []
    for Ll in range(1, Llength):
        if D2A[Ll][13] == "SSS":
            CXL08.append(datetime.datetime.strptime(D2A[Ll][0], '%Y-%m-%d'))
            CXL09.append(float(D2A[Ll][2]))
    CXL08.reverse()
    CXL09.reverse()
    #---------------------------------------
    # volume
    CXL10 = []
    for Ll in range(1, Llength):
        CXL10.append(int(D2A[Ll][5]))
    CXL10.reverse()
    #---------------------------------------
    plt.figure(1)
    ax1 = plt.subplot(211)
    ax2 = plt.subplot(212)
    plt.sca(ax1)
    plt.plot(CXL02, CXL03, color="black", label=Label03)
    plt.plot_date(pylab.date2num(CXL06), CXL07)
    plt.plot_date(pylab.date2num(CXL08), CXL09, color="red")
    plt.title(StockName + " + Average " + str(Duration) +
              " + Buy / Sell point ")
    plt.sca(ax2)
    plt.plot(CXL02, CXL10)
    plt.title(" Vol. ")
    plt.show()
Example no. 21
def make_hourly(data):
    '''
	'''
    time = data['time']
    ntimesnew = int((time[-1] - time[0]).total_seconds() / 3600)
    timenew = [time[0] + dt.timedelta(i) / 24 for i in range(ntimesnew)]
    for varname, var in data.iteritems():
        if (varname == 'time'):
            continue
        var = sp.interp(pl.date2num(timenew), pl.date2num(time), var)
        data[varname] = var
    data['time'] = timenew
    return 0
Example no. 22
def make_hourly(data):
	'''
	'''
	time = data['time']
	ntimesnew = int((time[-1]-time[0]).total_seconds() / 3600)
	timenew = [time[0]+dt.timedelta(i)/24 for i in range(ntimesnew)]
	for varname, var in data.iteritems():
		if (varname == 'time'):
			continue
		var = sp.interp(pl.date2num(timenew), pl.date2num(time), var)
		data[varname] = var
	data['time'] = timenew
	return 0
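The same hourly resampling written with numpy only (np.interp is the function that scipy's sp.interp aliases); the input series here is synthetic.

# Resample an irregular time series to hourly steps with np.interp on date2num values.
import datetime as dt
import numpy as np
from matplotlib.dates import date2num

time = [dt.datetime(2021, 5, 1) + dt.timedelta(minutes=37 * i) for i in range(20)]
temp = np.linspace(10.0, 15.0, len(time))          # synthetic measurements

n_hours = int((time[-1] - time[0]).total_seconds() / 3600)
time_new = [time[0] + dt.timedelta(hours=i) for i in range(n_hours)]
temp_new = np.interp(date2num(time_new), date2num(time), temp)
print(temp_new[:3])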
Example no. 23
def extraPlots():
    import datetime
    from matplotlib.dates import MONDAY, MonthLocator, WeekdayLocator, DateFormatter

    #converts dates to format matplotlib understands...
    time = P.date2num(data[:,dict['DateTimeUT']])

    mondays   = WeekdayLocator(MONDAY)
    months    = MonthLocator(range(1,13, 2), bymonthday=2)
    monthsFmt = DateFormatter("%b '%y")
    
    y2007 = datetime.date(2007, 1, 1)
    y2008 = datetime.date(2008, 1, 1)

    y2007plot = P.date2num(y2007)
    y2008plot = P.date2num(y2008)

    widening = 5.

    fig = P.figure()
    P.subplots_adjust(hspace=0.1)
    ax = fig.add_subplot(211)
    P.title('ALFOSC focus pyramid data')
    ax.plot_date(time, telfocusOld, 'wo', xdate = True)
    ax.plot_date(time, telfocusCorrected, 'bo')
    ax.axhline(medianNew, color = 'b', label = 'New Median', lw = 1., ls = '-')
    ax.axhline(medianOld, color = 'r', label ='Old Median', lw = 1., ls = '--')
    ax.axvline(y2007plot, color = 'k')
    ax.axvline(y2008plot, color = 'k')
    ax.legend(shadow = True, loc = 'best')
    P.ylabel('Telescope Focus + Median Offset')
    P.xlim(min(time)-widening, max(time)+widening)
    P.ylim(23300.,23500.)
    ax.xaxis.set_major_locator(months)
    ax.xaxis.set_major_formatter(monthsFmt)
    ax.xaxis.set_minor_locator(mondays)
    fig.autofmt_xdate()
    
    bx = fig.add_subplot(212)
    bx.plot_date(time, data[:,dict['TempInAirDegC']], fmt='ro', xdate=True)
    bx.axhline(0.)
    bx.axvline(y2007plot, color = 'k')
    bx.axvline(y2008plot, color = 'k')
    bx.xaxis.set_major_locator(months)
    bx.xaxis.set_major_formatter(monthsFmt)
    bx.xaxis.set_minor_locator(mondays)
    P.xlim(min(time)-widening, max(time)+widening)
    P.ylabel('Temperature In Air (DegC)')
    fig.autofmt_xdate()
    fig.savefig('foc-pyr_time.png')
    P.close()
Example no. 24
def extraPlots():
    import datetime
    from matplotlib.dates import MONDAY, MonthLocator, WeekdayLocator, DateFormatter

    #converts dates to format matplotlib understands...
    time = P.date2num(data[:, dict['DateTimeUT']])

    mondays = WeekdayLocator(MONDAY)
    months = MonthLocator(range(1, 13, 2), bymonthday=2)
    monthsFmt = DateFormatter("%b '%y")

    y2007 = datetime.date(2007, 1, 1)
    y2008 = datetime.date(2008, 1, 1)

    y2007plot = P.date2num(y2007)
    y2008plot = P.date2num(y2008)

    widening = 5.

    fig = P.figure()
    P.subplots_adjust(hspace=0.1)
    ax = fig.add_subplot(211)
    P.title('ALFOSC focus pyramid data')
    ax.plot_date(time, telfocusOld, 'wo', xdate=True)
    ax.plot_date(time, telfocusCorrected, 'bo')
    ax.axhline(medianNew, color='b', label='New Median', lw=1., ls='-')
    ax.axhline(medianOld, color='r', label='Old Median', lw=1., ls='--')
    ax.axvline(y2007plot, color='k')
    ax.axvline(y2008plot, color='k')
    ax.legend(shadow=True, loc='best')
    P.ylabel('Telescope Focus + Median Offset')
    P.xlim(min(time) - widening, max(time) + widening)
    P.ylim(23300., 23500.)
    ax.xaxis.set_major_locator(months)
    ax.xaxis.set_major_formatter(monthsFmt)
    ax.xaxis.set_minor_locator(mondays)
    fig.autofmt_xdate()

    bx = fig.add_subplot(212)
    bx.plot_date(time, data[:, dict['TempInAirDegC']], fmt='ro', xdate=True)
    bx.axhline(0.)
    bx.axvline(y2007plot, color='k')
    bx.axvline(y2008plot, color='k')
    bx.xaxis.set_major_locator(months)
    bx.xaxis.set_major_formatter(monthsFmt)
    bx.xaxis.set_minor_locator(mondays)
    P.xlim(min(time) - widening, max(time) + widening)
    P.ylabel('Temperature In Air (DegC)')
    fig.autofmt_xdate()
    fig.savefig('foc-pyr_time.png')
    P.close()
Example no. 25
    def uvmat(self):
        hsmat = np.zeros ([20]+list(self.llat.shape)).astype(np.int16)
        jd1 = pl.date2num(dtm(2003,1,1))
        jd2 = pl.date2num(dtm(2009,12,31))

        vlist = np.linspace(0,1.5,21)
        for jd in np.arange(jd1,jd2+1):
            print pl.num2date(jd)
            self.load(jd=jd)
            uv = np.sqrt(self.u**2 + self.v**2)
            for n,(v1,v2) in enumerate(zip(vlist[:-1],vlist[1:])):
                msk = (uv>=v1) & (uv<v2)
                hsmat[n,msk] += 1
        return hsmat
Example no. 26
    def uvmat(self):
        hsmat = np.zeros([20] + list(self.llat.shape)).astype(np.int16)
        jd1 = pl.date2num(dtm(2003, 1, 1))
        jd2 = pl.date2num(dtm(2009, 12, 31))

        vlist = np.linspace(0, 1.5, 21)
        for jd in np.arange(jd1, jd2 + 1):
            print pl.num2date(jd)
            self.load(jd=jd)
            uv = np.sqrt(self.u**2 + self.v**2)
            for n, (v1, v2) in enumerate(zip(vlist[:-1], vlist[1:])):
                msk = (uv >= v1) & (uv < v2)
                hsmat[n, msk] += 1
        return hsmat
Example no. 27
def plot(prediction_dir):

    # get real data
    USING_CACHE = True
    series_cache_path = './data/series_cache.pkl'
    real_data = extractHourlyPower(series_cache_path, USING_CACHE)

    # get prediction data
    fi = open(prediction_dir,'rb')
    prediction = pickle.load(fi)
    fi.close()
    print('prediction amount: '+str(len(prediction)))
    print('prediction shape: '+str(numpy.shape(prediction[0])))

    # get prediction data
    #fi = open('./result/pre_history_average.pkl','rb')
    fi = open('./result/pre_ecd-dcd_batch1_lstm1_patience5.pkl','rb')
    history_average = pickle.load(fi)
    fi.close()

    # plot real data
    date = [str(real_data[i:i+1].keys().values[0]) for i in range(len(real_data))]
    real_power = [i for i in real_data]
    real_x = [dt.datetime.strptime(d,'%Y%m%d%H') for d in date]
    pylab.plot_date(pylab.date2num(real_x), real_power, linestyle='-', color='black')
    
    # plot prediction data
    power_pre = prediction
    pre_point_num = len(power_pre)
    data_pre = date[-pre_point_num:]
    #print(data_pre)
    #print(power_pre)
    x_pre = [dt.datetime.strptime(d,'%Y%m%d%H') for d in data_pre]
    pylab.plot_date(pylab.date2num(x_pre), power_pre, linestyle='-', color='green')

    # plot history average
    pylab.plot_date(pylab.date2num(x_pre), history_average, linestyle='-', color='red')
    
    # calculate rmse
    y_true = real_power[-pre_point_num:]
    print('history average rmse: ', sqrt(smet.mean_squared_error(y_true, history_average)))
    print('prediction rmse: ', sqrt(smet.mean_squared_error(y_true, power_pre)))
    #print(sqrt(smet.mean_squared_error(y_true, power_val_pre)))
        
    xlabel(u"date & hour")
    ylabel(u"power_used (every hour)")

    grid(True)

    show()
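A reduced sketch of the comparison above: two hourly series plotted against date2num values and an RMSE computed without scikit-learn. The data are synthetic.

# Observed vs. predicted hourly series on a date axis, plus a plain-numpy RMSE.
import datetime as dt
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.dates import date2num

hours = [dt.datetime(2017, 3, 1) + dt.timedelta(hours=i) for i in range(48)]
observed = 100 + 10 * np.sin(np.arange(48) * 2 * np.pi / 24)
predicted = observed + np.random.normal(0, 2, size=48)

plt.plot(date2num(hours), observed, 'k-', label='observed')
plt.plot(date2num(hours), predicted, 'g-', label='predicted')
plt.gca().xaxis_date()
plt.gcf().autofmt_xdate()
plt.legend()
rmse = np.sqrt(np.mean((observed - predicted) ** 2))
print('rmse:', rmse)
plt.show()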
Example no. 28
 def refresh(self, fld, fldtype="DAY", jd1=None, jd2=None, delall=False):
     """ Read a L3 mapped file and add field to current instance"""
     jd1 = pl.datestr2num('2003-01-01') if jd1 is None else jd1
     jd2 = int(pl.date2num(dtm.now())) - 1  if jd2 is None else jd2
     for jd in np.arange(jd1, jd2):
         print " --- %s --- " % pl.num2date(jd).strftime('%Y-%m-%d')
         filename = os.path.join(
             self.datadir, self.generate_filename(jd,fld,fldtype) + ".nc")
         if delall:
             for fn in glob.glob(filename + "*"):
                 print "Deleted %s" % fn
                 os.remove(fn)
         print "Checking files"
         if not os.path.isfile(filename[:-3] + '.npz'):
             try:
                 self.load(fld, fldtype, jd=jd, verbose=True)
             except IOError:
                 print "Downloading failed. Trying to remove old files."
                 try:
                     os.remove(filename)
                 except:
                     pass
                 try:
                     self.load(fld,fldtype,jd=jd,verbose=True)
                 except:
                     print ("   ###   Warning! Failed to add %s   ###" %
                            os.path.basename(filename))
             print "\n"
         else:
             print "found"
Example no. 29
def plotresult(outflow, wetness):
    "Plots the result using matplotlib"

    # import some matplotlib functionality
    import pylab
    import numpy
    import cmf.draw
    # Make the upper plot
    pylab.subplot(211)
    # Draw the timeseries. cmf.draw.plot_timeseries is a thin wrapper over pylab.plot_date
    cmf.draw.plot_timeseries(outflow)
    pylab.title('Groundwater recharge')
    pylab.axis('tight')
    # Make the lower plot
    pylab.subplot(212)
    # Convert wetness to a numpy array - for faster analysis
    wetness = numpy.array(wetness)
    # Make a times/depth contour map
    pylab.contourf([pylab.date2num(t.AsPython()) for t in outflow.iter_time()],
                   c.layers.thickness * 0.5 - c.layers.lower_boundary,
                   wetness.T,
                   levels=numpy.linspace(wetness.min(), 1, 50),
                   cmap=pylab.cm.jet_r,
                   extend='both',
                   )
    pylab.title('Wetness')
    pylab.gca().xaxis_date()
    pylab.axis('tight')
    pylab.show()
Example no. 30
 def _jd_to_dtm(self):
     dtobj = pl.num2date(self.jd)
     njattrlist = ['yr',  'mn',   'dy', 'hr',  'min',    'sec']
     dtattrlist = ['year','month','day','hour','minute', 'second']
     for njattr,dtattr in zip(njattrlist, dtattrlist):
         setattr(self, njattr, getattr(dtobj, dtattr))
     self.yd = self.jd - pl.date2num(dtm(self.yr,1,1)) + 1
Example no. 31
def make_ticket_history_table(env, dates, sorted_events):
    """
        This function takes list of dates in milestone and ticket events
        then produce a dictionary with key as milestone event and value as 
        that list of ticket counts each day.
        
        dates is the numerical array of date in UTC time.
        sorted_event is dictionary of events that occurs in milestone
    
    """
    #Initialize the count using key in events

    tkt_counts = {'Enter':[], 'Leave':[], 'Finish':[]}

    #initialize the table    
    for date in dates:

        #env.log.info("Date:%s" % (num2date(date),))
        for key in tkt_counts:
            tkt_counts[key].append(0)

    #Create dictionary of list that hold ticket count each day in dates
    for event in sorted_events:

        #Time in epoch time
        date = to_datetime(event[0])

        #Get the index of this date in the dates list
        index = bisect(dates, date2num(date)) - 1

        for key in tkt_counts:
            tkt_counts[key][index] = tkt_counts[key][index] + len(event[1][key])

    return tkt_counts
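The bisect/date2num bucketing above in isolation: each event timestamp is counted in the day bin it falls into, assuming dates holds one date number per day of the milestone.

# Count events per day by bisecting a sorted list of daily date numbers.
from bisect import bisect
from datetime import datetime, timedelta
from matplotlib.dates import date2num

days = [datetime(2015, 7, 1) + timedelta(days=i) for i in range(7)]
dates = [date2num(d) for d in days]          # one date number per day (bin edges)
counts = [0] * len(dates)

events = [datetime(2015, 7, 2, 14, 30), datetime(2015, 7, 2, 9, 0),
          datetime(2015, 7, 5, 23, 59)]
for ev in events:
    counts[bisect(dates, date2num(ev)) - 1] += 1
print(counts)    # [0, 2, 0, 0, 1, 0, 0]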
Example no. 32
def calc_mld(files, start, x0=0.0, y0=0.0):
    """ Caclulate density-based MLD from a bunch of VTU files
    """

    mld = []
    times = []
    dates = []
    for file in files:

        try:
            os.stat(file)
        except:
            print("No such file: %s" % file)
            sys.exit(1)

        # open vtu and derive the field indices of the edge at (x=0,y=0) ordered by depth
        u = vtktools.vtu(file)
        pos = u.GetLocations()
        ind = get_1d_indices(pos, x0, y0)

        # from this we can derive the 1D profile of any field like this:
        depth = vtktools.arr([-pos[i, 2] for i in ind])

        # handle time for different types of plots
        time = u.GetScalarField('Time')
        times.append(time[0])  # seconds
        dates.append(date2num(start +
                              timedelta(seconds=time[0])))  # integer datetime

        # grab density profile and calculate MLD_den (using 2 different deviation parameters)
        d = u.GetScalarField('Density')
        den = vtktools.arr([d[i] * 1000 for i in ind])
        mld.append(calc_mld_den(den, depth))  #den0 = 0.03 is default

    return mld, times, dates
Example no. 33
    def __init__(self, projname, casename=None, **kwargs):
        super(Trm, self).__init__(projname, casename, **kwargs)
        if not hasattr(self, 'trmdir'):
            self.trmdir = os.getenv('TRMDIR')
            if self.trmdir is None:
                raise EnvironmentError, """ Trmdir is not set.
                Add TRMDIR=/path/to/tracmass to your local environment
                or specify trmdir when calling Trm."""

        def parse(od, pn, cn, sfx):
            gridfile = '/%s/projects/%s/%s_%s.in' % (od, pn, cn, sfx)
            if not os.path.isfile(gridfile):
                raise IOError("Can't find the file %s" % gridfile)
            return nlt.parse(gridfile)

        self.nlgrid = parse(self.trmdir, self.projname, self.projname, "grid")
        self.nlrun = parse(self.trmdir, self.projname, self.casename, "run")

        if not hasattr(self, 'datadir'): self.datadir = self.nlrun.outDataDir
        if not hasattr(self, 'datafile'):
            self.datafile = self.nlrun.outDataFile

        self.base_iso = pl.date2num(
            dtm(self.nlgrid.baseYear, self.nlgrid.baseMon,
                self.nlgrid.baseDay)) - 1
        self.imt = self.nlgrid.IMT
        self.jmt = self.nlgrid.JMT
Example no. 34
 def __init__(self, ob):
     # populate attributes with sounding data, initially this will
     # only work with a netcdf variable object (from Scientific.IO)
     # but more objects can be added by simply adding elif..
     # PLEASE always populate height in the values['alt'] position and
     # append values['date_list'] and datetime
     # datetime and date_list[index] are datetime objects
     # check if it is a netcdf variable list
     if "getValue" in dir(ob[ob.keys()[0]]):
         # this is a netcdf variables object
         self.datetime = num2date(datestr2num("19700101") + ob["base_time"].getValue() / (24.0 * 60.0 * 60.0))
         values = {}
         units = {}
         longname = {}
         for var in ob.keys():
             values.update({var: ob[var][:]})
             try:
                 units.update({var: ob[var].units})
             except AttributeError:
                 units.update({var: "no units"})
             try:
                 longname.update({var: ob[var].long_name})
             except AttributeError:
                 longname.update({var: "no longname"})
             values.update(
                 {"date_list": num2date(date2num(self.datetime) + values["time_offset"] / (24.0 * 60.0 * 60.0))}
             )
             units.update({"date_list": "unitless (object)"})
             self.values = values
             self.units = units
             self.long_name = longname
Example no. 35
    def _set_time_origin(self):
        try:
            self._time_counter_origin = self.read_nc_att(
                'time_counter', 'time_origin')
            ymd, hms = self._time_counter_origin.split(' ')
        except Exception:
            try:
                self._time_counter_origin = self.read_nc_att(
                    'time_counter', 'units')
                junk, junk, ymd, hms = self._time_counter_origin.split(' ')
                #print self._time_counter_origin
            except Exception:
                self._time_counter_origin = self.read_nc_att('time', 'units')
                junk, junk, ymd, hms = self._time_counter_origin.split(' ')
        #else: Exception
        #print self._time_counter_origin
        y, mo, d = ymd.split('-')
        h, mi, s = hms.split(':')

        #print y, mo, d, h, mi, s
        try:
            time_origin = datetime(int(y),
                                   strptime(mo, '%b').tm_mon, int(d), int(h),
                                   int(mi), int(s))
        except Exception:
            time_origin = datetime(int(y), int(mo), int(d), int(h), int(mi),
                                   int(s))
        self.time_origin = plt.date2num(time_origin)
        return self
Example no. 36
 def calc(self):
     p0=3.
     a0=15.
     ts = wn.TrainStrategy.BFGS
     inp = plb.date2num(self.time)
     tar = self.value-np.average(self.value)
     inp -= np.min(inp)
     delta = np.max(inp)-np.min(inp)
     k = 4*24/delta
     inp *= k
     w = wn.Net(10, np.min(inp), np.max(inp), np.average(tar),
                      a0, .01, p0)
     track = w.train(inp, tar, ts, 200, 100000, 1, True, True)
     #import pdb; pdb.set_trace()
     #tool.show(inp, tar, w, track)
     we = wn.Net(10, np.min(inp), np.max(inp), np.average(tar),
                      a0, .01, p0)
     tracke = we.train(inp, tar, ts, 200, 100000, 1, False, False)
     plb.title('Суммарная квадратичная ошибка')
     plb.plot(tracke['e'][0], label='Обычная вейвсеть')
     plb.plot(track['e'][0], linestyle='--', label='Полиморфная вейвсеть')
     plb.xlabel('Эпохи')
     plb.legend()
     print (tracke['e'][-1])
     print (track['e'][-1])
     plb.show()
     tool.show(inp, tar, w, tracke)
     sys.exit()
Example no. 37
 def refresh_chart(self):
     # Generate a chart.
     from pylab import setp, date2num, DateFormatter
     from datetime import datetime
     ps = 1e-12
     t, dt = self.data
     date = array([date2num(datetime.fromtimestamp(x)) for x in t])
     self.figure.subplots_adjust(bottom=0.2)
     self.plot = self.figure.add_subplot(1, 1, 1)
     self.plot.clear()
     self.plot.set_xlabel("time")
     self.plot.xaxis_date()
     setp(self.plot.get_xticklabels(), rotation=90, fontsize=10)
     self.plot.set_ylabel("timing error [ps]")
     self.plot.grid()
     if not all(isnan(dt)):
         self.plot.plot(date, (dt - self.nom_delay) / ps, '.')
         # Restrict the time range plotted according to the slider.
         tmin, tmax = amin(date[~isnan(dt)]), amax(date[~isnan(dt)])
         ##tmin = tmax - self.fraction*(tmax-tmin)
         self.plot.set_xlim(tmin, tmax)
     tmin, tmax = self.plot.get_xlim()
     if tmax - tmin < 5. / 24 / 60: date_format = "%H:%M:%S"
     elif tmax - tmin < 1: date_format = "%H:%M"
     else: date_format = "%b %d %H:%M"
     self.plot.xaxis.set_major_formatter(DateFormatter(date_format))
     if not isnan(self.min_dt): self.plot.set_ylim(ymin=self.min_dt / ps)
     if not isnan(self.max_dt): self.plot.set_ylim(ymax=self.max_dt / ps)
     self.canvas.draw()
Example no. 38
def work_1():
    data_in = datetime(2010,6,24,8,00,0)
    data_fin = datetime(2010,6,24,22,00,0)
    #np.concatenate((dati,dati2))
    dati = df.query_db('greenhouse.db','data',data_in,data_fin)
    Is = dati['rad_int_sup_solar']
    lista_to_filter = df.smooht_Is(Is)
    Is_2 = df.smooth_value(Is,lista_to_filter)
    
    tra_P_M = mf.transpiration_P_M(Is_2,dati['rad_int_inf_solar'],0.64,2.96,((dati['temp_1']+dati['temp_2'])/2)+273.15,(dati['RH_1']+dati['RH_2'])/200)
    tra_weight = mf.transpiration_from_balance(dati['peso_balanca'],300,2260000)
    
    
    delta_peso = np.diff(dati['peso_balanca'])
    fr,lista_irr,lista_irr_free = mf.find_irrigation_point(delta_peso,dati['data'])
    lista_night = mf.remove_no_solar_point(dati['rad_int_sup_solar'],50)
    
    
    lista_no = list(set(lista_irr+ lista_night))
    
    tran_weight,lista_yes = mf.transpiration_from_balance_irr(dati['peso_balanca'],300,2260000,lista_no)
    min_avg = 6
    tra_weigh_avg,time_weight = df.avg2(tran_weight,lista_yes,min_avg)
    tra_P_M_avg,time_P_M = df.avg2(tra_P_M,lista_yes,min_avg)
    
    data_plot.plot_time_data_2_y_same_axis(dati['data'][time_P_M], tra_P_M_avg, 'tra Penman', tra_weigh_avg, 'trans weight')
    RMSE = df.RMSE(tra_P_M_avg, tra_weigh_avg)
    print "RMSE is", RMSE
    print "RRMSE is", df.RRMSE(RMSE, tra_weigh_avg)
    
    date = dati['data'][time_P_M].astype(object)
    dates= pylab.date2num(date)
    pylab.plot_date(dates,tra_weigh_avg,'rx')
Example no. 39
def time_string_to_pylab_float_time(string):

	# I don't want this entire module to be dependent on pylab.
	try:
		import pylab
	except:
		print >> sys.stderr, "Couldn't import pylab module."
		sys.exit(1)

	# strptime can't handle the fractional part.  Split that off.
	pieces = string.split('.')
	date_and_time_part = pieces[0]
	time_struct = date_and_time_part_to_time_struct(date_and_time_part)
	float_time = pylab.date2num(time_struct)

	# Add in the fractional part.  Also try to handle the timezone part,
	# if any.
	if len(pieces) > 1:
		remaining_pieces = pieces[1].split()
		if len(remaining_pieces) == 1:
			pass # No timezone offset
		elif len(remaining_pieces) == 2:
			pass # Timezone offset; ignore it.
		else:
			print >> sys.stderr, "Extra stuff in time string ", string
			sys.exit(1)
		sec_frac = float('.' + remaining_pieces[0])
		return float_time + sec_frac / 24 / 60 / 60
	else:
		return float_time
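A sketch of the fractional-second handling above using datetime.strptime directly; the '%Y-%m-%d %H:%M:%S' format is an assumption (the original delegates parsing to date_and_time_part_to_time_struct), and the timezone branch is omitted.

# Timestamp string with a fractional second -> matplotlib float day.
from datetime import datetime
from matplotlib.dates import date2num

def to_float_day(s):
    # assumed format; the original project parses the whole part elsewhere
    whole, frac = s.split('.')
    base = date2num(datetime.strptime(whole, '%Y-%m-%d %H:%M:%S'))
    return base + float('.' + frac) / 24 / 60 / 60

print(to_float_day('2011-08-15 12:30:00.25'))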
Example no. 40
    def __init__(self, projname, casename=None, **kwargs):
        super(Trm, self).__init__(projname, casename, **kwargs)
        if not hasattr(self, "trmdir"):
            self.trmdir = os.getenv("TRMDIR")
            if self.trmdir is None:
                raise EnvironmentError, """ Trmdir is not set.
                Add TRMDIR=/path/to/tracmass to your local environment
                or specify trmdir when calling Trm."""

        def parse(od, pn, cn, sfx):
            gridfile = "/%s/projects/%s/%s_%s.in" % (od, pn, cn, sfx)
            if not os.path.isfile(gridfile):
                raise IOError("Can't find the file %s" % gridfile)
            return nlt.parse(gridfile)

        self.nlgrid = parse(self.trmdir, self.projname, self.projname, "grid")
        self.nlrun = parse(self.trmdir, self.projname, self.casename, "run")

        if not hasattr(self, "datadir"):
            self.datadir = self.nlrun.outDataDir
        if not hasattr(self, "datafile"):
            self.datafile = self.nlrun.outDataFile

        self.base_iso = pl.date2num(dtm(self.nlgrid.baseYear, self.nlgrid.baseMon, self.nlgrid.baseDay)) - 1
        self.imt = self.nlgrid.IMT
        self.jmt = self.nlgrid.JMT
Example no. 41
def plot_date_time_graph(db_name,table_name):
	format='%d %b %Y %I:%M %p'
	conn = sqlite3.connect(os.getcwd()+'/data/'+db_name+'.db')
	c=conn.cursor()
	date_time_arr=[]
	tweet_count=[]
	for row in c.execute('SELECT date_posted,time_posted From '+table_name):
		date_string= ' '.join(row)
		date_time_arr.append(datetime.strptime(date_string, format))

	for row in c.execute('SELECT retweets From '+table_name):
		tweet_count.append(row[0]+1)
		y= np.array(tweet_count)
		x=np.array(date_time_arr)
		N=len(tweet_count)
		colors = np.random.rand(N)
	numtime = [date2num(t) for t in x]
	# plotting the histogram
	ax = figure().gca()
	x, y, patches = hist(numtime, bins=50,alpha=.5)
	print x,y
	# adding the labels for the x axis
	tks = [num2date(p.get_x()) for p in patches] 
	xticks(tks,rotation=40)
	# formatting the dates on the x axis
	ax.xaxis.set_major_formatter(DateFormatter('%d %b %H:%M'))
	ax.set_xlabel('Time(dd-mm HH:MM)', fontsize=16)
	ax.set_ylabel('Tweet Count', fontsize=16)
	show()
Example no. 42
def meter_logs(lines):
    lists = (line.lower().split(',') for line in lines)
    log = (dict(zip(defaultcolnames, el)) for el in lists)
    log = field_map(log, 'Time Stamp', 
                    lambda s: pylab.date2num(datetime.strptime(s, "%Y%m%d%H%M%S")))
    log = field_map(log, 'Watts', lambda s: float(s))
    return log
Example no. 43
def calc_mld(files,start,x0=0.0,y0=0.0):
    """ Caclulate density-based MLD from a bunch of VTU files
    """

    mld = []
    times = []
    dates = []
    for file in files:
      
        try:
            os.stat(file)
        except:
            print("No such file: %s" % file)
            sys.exit(1)

        # open vtu and derive the field indices of the edge at (x=0,y=0) ordered by depth
        u=vtktools.vtu(file)
        pos = u.GetLocations()
        ind = get_1d_indices(pos, x0, y0)
    
        # from this we can derive the 1D profile of any field like this:
        depth = vtktools.arr([-pos[i,2] for i in ind])
    
        # handle time for different types of plots
        time = u.GetScalarField('Time')
        times.append(time[0])   # seconds
        dates.append( date2num(start + timedelta(seconds=time[0])) ) # integer datetime
    
        # grab density profile and calculate MLD_den (using 2 different deviation parameters)
        d = u.GetScalarField('Density')
        den = vtktools.arr( [d[i] * 1000 for i in ind] )
        mld.append( calc_mld_den(den, depth) ) #den0 = 0.03 is default


    return mld, times, dates
Example no. 44
def make_ticket_history_table(env, dates, sorted_events):
    """
        This function takes list of dates in milestone and ticket events
        then produce a dictionary with key as milestone event and value as 
        that list of ticket counts each day.
        
        dates is the numerical array of date in UTC time.
        sorted_event is dictionary of events that occurs in milestone
    
    """
    #Initialize the count using key in events

    tkt_counts = {'Enter': [], 'Leave': [], 'Finish': []}

    #initialize the table
    for date in dates:

        #env.log.info("Date:%s" % (num2date(date),))
        for key in tkt_counts:
            tkt_counts[key].append(0)

    #Create dictionary of list that hold ticket count each day in dates
    for event in sorted_events:

        #Time in epoch time
        date = to_datetime(event[0])

        #Get the index of this date in the dates list
        index = bisect(dates, date2num(date)) - 1

        for key in tkt_counts:
            tkt_counts[key][index] = tkt_counts[key][index] + len(
                event[1][key])

    return tkt_counts
Example no. 45
 def get_spot_history(self, instance_type, start=None, end=None, plot=False):
     if not utils.is_iso_time(start):
         raise exception.InvalidIsoDate(start)
     if not utils.is_iso_time(end):
         raise exception.InvalidIsoDate(end)
     hist = self.conn.get_spot_price_history(start_time=start, 
                                     end_time=end,
                                     instance_type=instance_type, 
                                     product_description="Linux/UNIX")
     if not hist:
         raise exception.SpotHistoryError(start,end)
     dates = [ utils.iso_to_datetime_tuple(i.timestamp) for i in hist]
     prices = [ i.price for i in hist ]
     maximum = max(prices)
     avg = sum(prices)/len(prices)
     log.info("Current price: $%.2f" % hist[-1].price)
     log.info("Max price: $%.2f" % maximum)
     log.info("Average price: $%.2f" % avg)
     if plot:
         try:
             import pylab
             pylab.plot_date(pylab.date2num(dates), prices, linestyle='-') 
             pylab.xlabel('date')
             pylab.ylabel('price (cents)')
             pylab.title('%s Price vs Date (%s - %s)' % (instance_type, start, end))
             pylab.grid(True)
             pylab.show()
         except ImportError,e:
             log.error("Error importing pylab:")
             log.error(str(e)) 
             log.error("please check that matplotlib is installed and that:")
             log.error("   $ python -c 'import pylab'")
             log.error("completes without error")
Example no. 46
 def refresh(self, fld, fldtype="DAY", jd1=None, jd2=None, delall=False):
     """ Read a L3 mapped file and add field to current instance"""
     jd1 = pl.datestr2num('2003-01-01') if jd1 is None else jd1
     jd2 = int(pl.date2num(dtm.now())) - 1 if jd2 is None else jd2
     for jd in np.arange(jd1, jd2):
         print " --- %s --- " % pl.num2date(jd).strftime('%Y-%m-%d')
         filename = os.path.join(
             self.datadir,
             self.generate_filename(jd, fld, fldtype) + ".nc")
         if delall:
             for fn in glob.glob(filename + "*"):
                 print "Deleted %s" % fn
                 os.remove(fn)
         print "Checking files"
         if not os.path.isfile(filename[:-3] + '.npz'):
             try:
                 self.load(fld, fldtype, jd=jd, verbose=True)
             except IOError:
                 print "Downloading failed. Trying to remove old files."
                 try:
                     os.remove(filename)
                 except:
                     pass
                 try:
                     self.load(fld, fldtype, jd=jd, verbose=True)
                 except:
                     print("   ###   Warning! Failed to add %s   ###" %
                           os.path.basename(filename))
             print "\n"
         else:
             print "found"
Esempio n. 47
0
def batch_insert():
    import batch
    def copy(jd):
        tr = traj('jplNOW','ftp','/Volumes/keronHD3/ormOut/')
        print pl.num2date(jd), jd
        tr.load(jd)
        tr.remove_satnans()
        if len(tr.x) > 0:
            tr.db_copy()

    #batch.jdloop(copy,733773.0, 734138.0,3)
    for jd in np.arange(733865.0,734138):
        dt1 = pl.date2num(dtm.now())
        copy(jd)
        dt2 = pl.date2num(dtm.now())        
        print "----------",dt2-dt1
Esempio n. 48
0
def predict(recentDate, models):
    newDates = []
    estCPI = []
    for i in range(24):
        newDates.append(add_months(recentDate, 1))
        recentDate = newDates[-1]
    mplDates = pylab.date2num(newDates)
    for m in models:
        for x in mplDates:
            estCPI.append(pylab.polyval(m, x))
    fig, ax = pylab.subplots()
    pylab.rcParams['lines.linewidth'] = 1
    pylab.rcParams['axes.titlesize'] = 10
    pylab.rcParams['axes.labelsize'] = 10
    pylab.rcParams['xtick.labelsize'] = 8
    pylab.rcParams['ytick.labelsize'] = 8
    pylab.rcParams['xtick.major.size'] = 8
    pylab.rcParams['ytick.major.size'] = 8
    pylab.rcParams['lines.markersize'] = 5
    pylab.rcParams['legend.numpoints'] = 1
    pylab.yticks(pylab.arange(min(estCPI), max(estCPI) + 1, 0.5))
    ax.fmt_xdata = pylab.DateFormatter('%Y-%m-%d')
    ax.set_title('Projected Canada CPI Inflation')
    pylab.xlabel('Date')
    fig.autofmt_xdate()
    pylab.ylabel('Est. CPI')
    pylab.plot(newDates, estCPI)
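
As a standalone illustration of evaluating a model over matplotlib date numbers, here is a small sketch that fits a linear trend with pylab.polyfit and extrapolates it with pylab.polyval; the CPI figures are invented:

import pylab
from datetime import datetime

# hypothetical monthly CPI observations
obs_dates = [datetime(2014, m, 1) for m in range(1, 13)]
obs_cpi = [125.0 + 0.2 * i for i in range(12)]

x = pylab.date2num(obs_dates)
model = pylab.polyfit(x, obs_cpi, 1)            # linear trend in date-number space

future_dates = [datetime(2015, m, 1) for m in range(1, 7)]
est = pylab.polyval(model, pylab.date2num(future_dates))
pylab.plot_date(pylab.date2num(future_dates), est, 'b-')
pylab.show()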
Esempio n. 49
0
 def refresh(self, jd1=None, jd2=None):
     """ Read a L3 mapped file and add field to current instance"""
     jd1 = self.jdmin if jd1 is None else jd1
     jd2 = int(pl.date2num(dtm.now())) - 1 if jd2 is None else jd2
     for jd in np.arange(jd1, jd2 + 1):
         filename = os.path.join(self.datadir, self.generate_filename(jd))
         print " --- %s --- " % pl.num2date(jd).strftime('%Y-%m-%d')
         print "Checking %s" % filename + '.bz2'
         if not os.path.isfile(filename + '.bz2'):
             try:
                 self.load(jd=jd, verbose=True)
             except IOError:
                 print "Downloading failed. Trying to remove old files."
                 try:
                     os.remove(filename)
                 except:
                     pass
                 try:
                     os.remove(filename + ".bz2")
                 except:
                     pass
                 try:
                     self.load(jd=jd, verbose=True)
                 except:
                     print("   ###   Warning! Failed to add %s   ###" %
                           os.path.basename(filename))
             print "\n"
         else:
             print "found"
Esempio n. 50
0
def parsedate(s):
    if len(s) <= 7:
        year, month = s.split("-")
        result = datetime.datetime(int(year), int(month), 15)
    else:
        result = parser.parse(s)
    return pylab.date2num(result)
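
A quick, self-contained illustration of what the helper returns: pylab.date2num gives a float count of days that pylab.num2date converts back to a (UTC) datetime. The exact number depends on matplotlib's date epoch, so only the round trip is shown; the date below is arbitrary.

import datetime
import pylab

num = pylab.date2num(datetime.datetime(2012, 3, 15))
print(num)                   # float days since matplotlib's date epoch
print(pylab.num2date(num))   # 2012-03-15 00:00:00+00:00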
Esempio n. 52
0
def plot_dwaf_data(realtime, file_name='data_plot.png', gauge=True):
    x = pl.date2num(realtime.date)
    y = realtime.q

    pl.clf()
    pl.figure(figsize=(7.5, 4.5))
    pl.rc('text', usetex=True)  # use TeX fonts
    pl.plot_date(x,y,'b-',linewidth=1)
    pl.grid(which='major')
    pl.grid(which='minor')

    if gauge:
        pl.ylabel(r'Flow rate (m$^3$s$^{-1}$)')
        title = 'Real-time flow -- %s [%s]' % (realtime.station_id[0:6], realtime.station_desc)
    else:
        title = 'Real-time capacity -- %s [%s]' % (realtime.station_id[0:6], realtime.station_desc)
        pl.ylabel('Percentage of F.S.C')

    labeled_days = DayLocator(interval=3)
    ticked_days = DayLocator()
    dayfmt = DateFormatter('%d/%m/%Y')

    ax = pl.gca()
    ax.xaxis.set_major_locator(labeled_days)
    ax.xaxis.set_major_formatter(dayfmt)
    ax.xaxis.set_minor_locator(ticked_days)

    pl.xticks(fontsize=10)
    pl.yticks(fontsize=10)

    pl.title(title, fontsize=14)

    pl.savefig(file_name, dpi=100)
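
The locator/formatter pattern above can be reproduced on synthetic data. The following sketch assumes nothing about the DWAF reader and writes to a made-up file name:

import pylab as pl
from datetime import datetime, timedelta
from matplotlib.dates import DayLocator, DateFormatter

# hypothetical 6-hourly flow readings
dates = [datetime(2012, 1, 1) + timedelta(hours=6 * i) for i in range(60)]
flow = [10.0 + (i % 8) for i in range(60)]

pl.figure(figsize=(7.5, 4.5))
pl.plot_date(pl.date2num(dates), flow, 'b-', linewidth=1)
ax = pl.gca()
ax.xaxis.set_major_locator(DayLocator(interval=3))      # label every third day
ax.xaxis.set_major_formatter(DateFormatter('%d/%m/%Y'))
ax.xaxis.set_minor_locator(DayLocator())                # tick every day
pl.ylabel('Flow rate (m$^3$s$^{-1}$)')
pl.title('Real-time flow -- synthetic example')
pl.savefig('data_plot_example.png', dpi=100)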
Esempio n. 53
0
def calc_mld_tke_files(files,start,x0=0.0,y0=0.0):
    """ Caclulate tke-based MLD from a bunch of VTU files
    """

    mld = []
    times = []
    dates = []
    for file in files:
      
        try:
            os.stat(file)
        except:
            print "No such file: %s" % file
            sys.exit(1)

        # open vtu and derive the field indices of the edge at (x=0,y=0) ordered by depth
        u=vtktools.vtu(file)
        pos = u.GetLocations()
        ind = get_1d_indices(pos, x0, y0)
    
        # from this we can derive the 1D profile of any field like this:
        depth = vtktools.arr([-pos[i,2] for i in ind])
    
        # handle time for different types of plots
        time = u.GetScalarField('Time')
        times.append(time[0])   # seconds
        dates.append( date2num(start + timedelta(seconds=time[0])) ) # matplotlib date number
    
        # grab the TKE profile and calculate the TKE-based MLD
        d = u.GetScalarField('GLSTurbulentKineticEnergy')
        tke = vtktools.arr( [d[i] for i in ind] )
        mld.append( calc_mld_tke(tke, depth) )


    return mld, times, dates
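
The time handling shared by these calc_mld_* helpers can be shown on its own: model time in seconds after a start date becomes a matplotlib date number. The start date and offsets below are arbitrary.

from datetime import datetime, timedelta
from pylab import date2num, num2date

start = datetime(2006, 1, 1)
seconds = [0.0, 86400.0, 172800.0]           # hypothetical model output times

datenums = [date2num(start + timedelta(seconds=s)) for s in seconds]
print([str(num2date(d).date()) for d in datenums])   # ['2006-01-01', '2006-01-02', '2006-01-03']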
Esempio n. 54
0
 def refresh(self, jd1=None, jd2=None):
     """ Read a L3 mapped file and add field to current instance"""
     jd1 = self.jdmin if jd1 is None else jd1
     jd2 = int(pl.date2num(dtm.now())) - 1  if jd2 is None else jd2
     for jd in np.arange(jd1, jd2+1):
         filename = os.path.join(self.datadir, self.generate_filename(jd))
         print " --- %s --- " % pl.num2date(jd).strftime('%Y-%m-%d')
         print "Checking %s" % filename + '.bz2'
         if not os.path.isfile(filename + '.bz2'):
             try:
                 self.load(jd=jd, verbose=True)
             except IOError:
                 print "Downloading failed. Trying to remove old files."
                 try:
                     os.remove(filename)
                 except:
                     pass
                 try:
                     os.remove(filename + ".bz2")
                 except:
                     pass
                 try:
                     self.load(jd=jd,verbose=True)
                 except:
                     print ("   ###   Warning! Failed to add %s   ###" %
                            os.path.basename(filename))
             print "\n"
         else:
             print "found"
Esempio n. 55
0
 def get_spot_history(self, instance_type,
                      start=None, end=None, plot=False):
     if not utils.is_iso_time(start):
         raise exception.InvalidIsoDate(start)
     if not utils.is_iso_time(end):
         raise exception.InvalidIsoDate(end)
     hist = self.conn.get_spot_price_history(start_time=start,
                                     end_time=end,
                                     instance_type=instance_type,
                                     product_description="Linux/UNIX")
     if not hist:
         raise exception.SpotHistoryError(start, end)
     dates = [utils.iso_to_datetime_tuple(i.timestamp) for i in hist]
     prices = [i.price for i in hist]
     maximum = max(prices)
     avg = sum(prices) / len(prices)
     log.info("Current price: $%.2f" % hist[-1].price)
     log.info("Max price: $%.2f" % maximum)
     log.info("Average price: $%.2f" % avg)
     if plot:
         try:
             import pylab
             pylab.plot_date(pylab.date2num(dates), prices, linestyle='-')
             pylab.xlabel('date')
             pylab.ylabel('price ($)')
             pylab.title('%s Price vs Date (%s - %s)' % (instance_type,
                                                         start, end))
             pylab.grid(True)
             pylab.show()
         except ImportError, e:
             log.error("Error importing pylab:")
             log.error(str(e))
             log.error("please ensure matplotlib is installed and that:")
             log.error("   $ python -c 'import pylab'")
             log.error("completes without error")
Esempio n. 56
0
def plot_2d_data(data,depths,time_secs,start_date,file_path,axis_label,finish_date=None,mld_data=None,max_depth=150,interval=3,minimum=None,maximum=None,spacing=None,colour_scale=cm.jet,dates=None):
    """
    """
    # turn given 2d-arrays into numpy arrays (in case they are not already)
    data = vtktools.arr(data)
    time_secs = vtktools.arr(time_secs)
    depths = vtktools.arr(depths)
    
    # convert time profiles in seconds into matplotlib date numbers
    start = datetime.strptime(start_date, "%Y-%m-%d %H:%M:%S")
    if dates is None:
        dates = time_secs
        i = 0
        for time in time_secs:
            t = float(time[0].item())
            dates[i,:] = date2num(start + timedelta(seconds=t))
            i += 1
        
    # see if finishing date is given, default to last time given
    if finish_date is not None:
        finish = datetime.strptime(finish_date, "%Y-%m-%d %H:%M:%S")
    else:
        finish = dates[-1][0] 
    
    # define min/max and spacing of data if not given (so we see all of the data)
    if minimum is None:
        minimum = data.min()
        minimum = minimum - (0.1*minimum)
    if maximum is None:
        maximum = data.max()
        maximum = maximum + (0.1*maximum)
    if spacing is None:
        spacing = (maximum - minimum) / 256.

    # plot 2d colour graph...
    fig = figure(figsize=(15,8),dpi=90)
    ax = fig.add_axes([.1,.18,.9,.7])
    cs=ax.contour(dates, depths, data, arange(minimum,maximum,spacing),cmap=colour_scale)
    cs=ax.contourf(dates, depths, data, arange(minimum,maximum,spacing),cmap=colour_scale)
    pp=colorbar(cs,format='%.2f')
    if mld_data is not None:
        ax.plot(dates[:,0],mld_data,'w', alpha=0.7)
    
    dateFmt = mpl.dates.DateFormatter('%m/%Y')
    ax.xaxis.set_major_formatter(dateFmt)
    monthsLoc = mpl.dates.MonthLocator(interval=interval)
    ax.xaxis.set_major_locator(monthsLoc)
    labels = ax.get_xticklabels()
    for label in labels:
        label.set_rotation(30) 
    ax.set_ylim(max_depth, 0)
    ax.set_xlim(start,finish)
    pp.set_label(axis_label)
    xlabel('Date (mm/yyyy)')
    ylabel('Depth (m)')
    
    form = file_path.split('.')[-1].strip()
    savefig(file_path, dpi=90,format=form)
    close(fig)
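
Finally, a reduced, self-contained version of this date-axis contour plot, with synthetic arrays standing in for the VTU-derived data and an invented output file name:

import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.dates import DateFormatter, MonthLocator, date2num
from datetime import datetime, timedelta

start = datetime(2006, 1, 1)
datenums = np.array([date2num(start + timedelta(days=3 * i)) for i in range(120)])
depths = np.linspace(0, 150, 50)

# synthetic tracer field varying with time and depth
dates2d, depths2d = np.meshgrid(datenums, depths)
data = np.sin(dates2d / 58.0) * np.exp(-depths2d / 75.0)

fig = plt.figure(figsize=(15, 8), dpi=90)
ax = fig.add_axes([.1, .18, .9, .7])
cs = ax.contourf(dates2d, depths2d, data, 64, cmap=cm.jet)
fig.colorbar(cs, format='%.2f').set_label('synthetic tracer')
ax.xaxis.set_major_formatter(DateFormatter('%m/%Y'))
ax.xaxis.set_major_locator(MonthLocator(interval=3))
for label in ax.get_xticklabels():
    label.set_rotation(30)
ax.set_ylim(150, 0)                 # depth increases downwards
ax.set_xlabel('Date (mm/yyyy)')
ax.set_ylabel('Depth (m)')
plt.savefig('contour_example.png', dpi=90)
plt.close(fig)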