def update_attributes(self):
    if self.fbfile.data is None:
        self.start_text.SetLabel('n/a')
        self.end_text.SetLabel('n/a')
    else:
        self.start_text.SetLabel(num2date(self.data['dn_py'][0]).strftime("%Y-%m-%d %H:%M:%S"))
        self.end_text.SetLabel(num2date(self.data['dn_py'][-1]).strftime("%Y-%m-%d %H:%M:%S"))
def daily_timseries(ts):
    fig = Figure((2.56, 2.56), 300)
    canvas = FigureCanvas(fig)
    ax = fig.add_axes((0, 0, 1, 1))
    ax.set_ylim([0, 500])
    preferspan = ax.axhspan(SAFE[0], SAFE[1],
                            facecolor='g', alpha=0.2,
                            edgecolor='#003333', linewidth=1)
    # XXX: gets a list of days.
    timestamps = glucose.get_days(ts.time)
    halfday = dates.relativedelta(hours=12)
    soleday = dates.relativedelta(days=1)
    xmin, xmax = (timestamps[0], timestamps[1] + soleday)
    ax.set_xlim([xmin, xmax])
    #fig.autofmt_xdate()
    #plot_glucose_stems(ax, ts)
    plt.setp(ax.get_xminorticklabels(), visible=False)
    plt.setp(ax.get_xmajorticklabels(), visible=False)
    plt.setp(ax.get_ymajorticklabels(), visible=False)
    plt.setp(ax.get_yminorticklabels(), visible=False)
    ax.grid(True)
    xmin, xmax = ax.get_xlim()
    log.info(pformat({
        'xlim': [dates.num2date(xmin), dates.num2date(xmax)],
    }))
    return canvas
def format_coord(x, y):
    display_coord = current.transData.transform((x, y))
    if not self.button_vcursor.isChecked():
        inv = other.transData.inverted()
        try:
            ax_coord = inv.transform(display_coord)
        except IndexError:
            ax_coord = transformCoord2Log(display_coord, other, current)
        if other.get_lines():
            unit1 = "(%s)" % self.dictofline[other.get_lines()[0]].unit
        else:
            return ""
        if current.get_lines():
            unit2 = "(%s)" % self.dictofline[current.get_lines()[0]].unit
        else:
            if unit1 == "(Torr)":
                return ('{:<} y1%s = {:<}'.format(
                    *[num2date(x).strftime("%a %d/%m %H:%M:%S"),
                      '{:.3e}'.format(ax_coord[1])])) % unit1
            else:
                return ('{:<} y1%s = {:<}'.format(
                    *[num2date(x).strftime("%a %d/%m %H:%M:%S"),
                      '{:.2f}'.format(ax_coord[1])])) % unit1
        if unit1 == "(Torr)":
            return ('{:<} y1%s = {:<} y2%s = {:<}'.format(
                *[num2date(x).strftime("%a %d/%m %H:%M:%S"),
                  '{:.3e}'.format(ax_coord[1]),
                  '{:.2f}'.format(y)])) % (unit1, unit2)
        else:
            return ('{:<} y1%s = {:<} y2%s = {:<}'.format(
                *[num2date(x).strftime("%a %d/%m %H:%M:%S"),
                  '{:.3e}'.format(ax_coord[1]),
                  '{:.2f}'.format(y)])) % (unit1, unit2)
    else:
        self.verticalCursor(x, display_coord)
        return ""
def PaceDateGraph(x_list, y_list, graph_title, graph_xaxis, graph_yaxis):
    """Creates a graph showing the average pace of each run at the same distance range"""
    dates = [mdates.date2num(day) for day in x_list]
    #paces = [Hour2Seconds(time) for time in y1_paceList]
    fig, ax = plt.subplots()
    ax.plot(dates, y_list, '#FAC8CA')
    ax.set_title(graph_title)
    ax.set_xlabel(graph_xaxis)
    ax.set_ylim([(min(y_list) - 60), (max(y_list) + 60)])
    ax.set_ylabel(graph_yaxis)
    ax.fill_between(dates, y_list, color='#FCE6E6')
    # a locator places the ticks, a formatter labels them
    ax.xaxis.set_major_locator(mdates.MonthLocator())
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%m/%Y'))
    # pad the x-range by three days on either side
    dates.append(mdates.date2num(mdates.num2date(min(dates)) - timedelta(days=3)))
    dates.append(mdates.date2num(mdates.num2date(max(dates)) + timedelta(days=3)))
    ax.set_xlim([(mdates.num2date(min(dates))), (mdates.num2date(max(dates)))])
    # if graph_type == "month":
    #     ax2.xaxis.set_major_locator(mdates.DayLocator())
    #     ax2.xaxis.set_major_formatter(mdates.DateFormatter('%d/%m'))
    # elif graph_type == "all":
    #     dates.append(mdates.date2num(mdates.num2date(min(dates)) - timedelta(days=3)))
    #     dates.append(mdates.date2num(mdates.num2date(max(dates)) + timedelta(days=3)))
    #     ax2.set_xlim([(mdates.num2date(min(dates))), (mdates.num2date(max(dates)))])
    plt.show()
def trajectory_point_to_str(data, index, with_address=True):
    coords = "%s, %s" % tuple(data[index][1:])
    if with_address:
        geocoder = Geocoder()
        address = geocoder.reverse(coords, exactly_one=True).address
    else:
        address = None
    tz = pytz.timezone('US/Pacific')
    date = num2date(data[index][0], tz=tz)
    try:
        dt = (num2date(data[index + 1][0]) - date).total_seconds()
        dist = distance(data[index], data[index + 1])
        v = ms_to_mph * dist / dt if dt != 0 else 0
        if dt < 60:
            dt_str = "%ds" % dt
        elif dt < 60 * 60:
            dt_str = "%dmin" % (dt / 60,)
        else:
            dt_str = "%.1fh" % (dt / 60 / 60,)
        metrics = "%s; %.2fm; %.fmph" % (dt_str, dist, v)
    except IndexError:
        metrics = "NO DATA"
    return "Index:%s; Date:%s; Address:%s; Coords: %s; dt,ds,v:%s" % \
        (index, date, address, coords, metrics)
def on_click(event):
    # capture events and button pressed
    events.append(event)
    if event.button == 1:
        l = self.left
    elif event.button == 3:
        l = self.right
    l.set_xdata([event.xdata, event.xdata])
    l.figure.canvas.draw()

    # get the left slice time data, convert matplotlib num to date;
    # format date string for the GUI start date field
    x1 = self.left.get_xdata()[0]
    temp_date = mdates.num2date(x1, tz=pytz.timezone(str(self.tzstringvar.get())))
    datestring = temp_date.strftime('%m/%d/%y %H:%M:%S')
    self.date1.set(datestring)

    # get the right slice time data, convert matplotlib num to date;
    # format date string for the GUI end date field
    x2 = self.right.get_xdata()[0]
    temp_date2 = mdates.num2date(x2, tz=pytz.timezone(str(self.tzstringvar.get())))
    datestring2 = temp_date2.strftime('%m/%d/%y %H:%M:%S')
    self.date2.set(datestring2)

    xy = [[x1, 0], [x1, 1], [x2, 1], [x2, 0], [x1, 0]]
    # draw yellow highlight over selected area in graph
    patch.set_xy(xy)
    patch.figure.canvas.draw()
def get_12h_intervals(interval):
    my_intervals = list()
    for el in interval:
        # convert time number to date in order to compare; 43200 sec = 12 hours
        if (md.num2date(el[1]) - md.num2date(el[0])).total_seconds() == 43200:
            my_intervals.append(el)
    return my_intervals
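# Quick sanity check for get_12h_intervals (illustrative, not from the source).
# Assumes intervals are (start, end) pairs of matplotlib date numbers, as above.
import datetime
import matplotlib.dates as md

_d0 = md.date2num(datetime.datetime(2020, 1, 1, 0, 0))
_d1 = md.date2num(datetime.datetime(2020, 1, 1, 12, 0))  # exactly 12 hours later
_d2 = md.date2num(datetime.datetime(2020, 1, 1, 18, 0))
# keeps only the first pair, whose span is exactly 43200 seconds
print(get_12h_intervals([(_d0, _d1), (_d0, _d2)]))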
def onselect(xmin, xmax):
    """
    A select event handler for the matplotlib SpanSelector widget.
    Selects a min/max range of the x or y axes for a matplotlib Axes.
    """
    # convert matplotlib float dates to a datetime format
    date_min = mdates.num2date(xmin)
    date_max = mdates.num2date(xmax)

    # put the xmin and xmax in datetime format to compare
    date_min = datetime.datetime(date_min.year, date_min.month, date_min.day,
                                 date_min.hour, date_min.minute)
    date_max = datetime.datetime(date_max.year, date_max.month, date_max.day,
                                 date_max.hour, date_max.minute)

    # find the indices that were selected
    indices = np.where((dates >= date_min) & (dates <= date_max))
    indices = indices[0]

    # set the data in second plot
    plot2.set_data(dates[indices], parameter['data'][indices])

    # calculate new mean, max, min
    param_mean = nanmean(parameter['data'][indices])
    param_max = np.nanmax(parameter['data'][indices])
    param_min = np.nanmin(parameter['data'][indices])

    ax2.set_xlim(dates[indices][0], dates[indices][-1])
    ax2.set_ylim(param_min, param_max)

    # show text of mean, max, min values on graph;
    # use matplotlib.patch.Patch properties and bbox
    text3 = 'mean = %.2f\nmax = %.2f\nmin = %.2f' % (param_mean, param_max, param_min)
    ax2_text.set_text(text3)
    fig.canvas.draw()
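# Hedged wiring sketch (ax1 is an assumed name for the upper axes; onselect and
# fig come from the surrounding script): attach the handler to a horizontal
# SpanSelector so dragging on ax1 calls onselect(xmin, xmax) with date numbers.
from matplotlib.widgets import SpanSelector

span = SpanSelector(ax1, onselect, 'horizontal', useblit=True)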
def drawGraph(events, filename):
    # determine the number of bins (bars) on the graph by
    # splitting the time the data spans by a time interval

    # calculate the time spanned by the data
    latestReading = num2date(max(events))
    earliestReading = num2date(min(events))
    dateRange = latestReading - earliestReading
    numberOfSeconds = dateRange.seconds + dateRange.days * 24 * 3600

    # chop the data up into one-day intervals (in seconds)
    intervalSize = 24 * 60 * 60

    # calculate how many intervals there are in numberOfSeconds;
    # round up so there is always at least one
    histogramBins = math.ceil(float(numberOfSeconds) / float(intervalSize))

    # draw the graph
    debug(str(histogramBins) + " histogramBins")
    debug(str(numberOfSeconds) + " numberOfSeconds")
    debug(str(intervalSize) + " intervalSize")
    debug(str(latestReading) + " latestReading")
    debug(str(earliestReading) + " earliestReading")
    debug(str(dateRange) + " dateRange")
    fig = plotDatehist(events, histogramBins, "Bird Box 1 Activity", intervalSize)

    # save the graph to a file
    pyplot.savefig(filename)
def viewlim_to_dt(self):
    major_ticks = self.axis.get_majorticklocs()
    # to deal with a non-uniform interval of major_ticks,
    # like days of month: [1, 8, 22, 29, 1, 8, ...]
    max_major_ticks_interval = max([abs(x2 - x1) for (x1, x2)
                                    in zip(major_ticks[:-1], major_ticks[1:])])
    return (dates.num2date(major_ticks[0], self.tz),
            dates.num2date(major_ticks[0] + max_major_ticks_interval, self.tz))
def __init__(self, par, display):
    gtk.Menu.__init__(self)
    ann = gtk.MenuItem(label=_("Annotate"))
    sub_ann = gtk.Menu()
    ann.set_submenu(sub_ann)
    for (ctx, color) in par.annotations.contexts():
        it = gtk.ImageMenuItem("")
        img = gtk.Image()
        img.set_from_stock(gtk.STOCK_BOLD, gtk.ICON_SIZE_MENU)
        it.set_image(img)
        it.get_child().set_markup("<span bgcolor=\"" + color + "\">" + ctx + "</span>")
        it.connect('activate', lambda w, str: par.create_annotation(str), ctx)
        sub_ann.append(it)
    sub_ann.append(gtk.SeparatorMenuItem())
    new_it = gtk.ImageMenuItem(_("New context..."))
    new_img = gtk.Image()
    new_img.set_from_stock(gtk.STOCK_ADD, gtk.ICON_SIZE_MENU)
    new_it.set_image(new_img)
    new_it.connect('activate', lambda w: par.create_context())
    sub_ann.append(new_it)
    self.append(ann)
    if display.src.hasCapability("play"):
        play_it = gtk.ImageMenuItem(stock_id=gtk.STOCK_MEDIA_PLAY)
        (start, end) = par.input_state.selection
        play_it.connect('activate', self.play_annotation,
                        par.get_parent().get_parent(), display,
                        num2date(start, UTC()), num2date(end, UTC()))
        self.append(play_it)
def _getdata(self, scname, dates):
    dates = np.array(dates)
    dates = dates[np.argsort(dates)]
    try:
        dtend = dates[-1] + timedelta(1)
        dtstart = dates[0]
    except TypeError:
        dtend = num2date(dates[-1] + 1)
        dtstart = num2date(dates[0])
    times, Lstar, MLT, MLAT, InvLat, density = get_density_and_time(scname, dtstart, dtend)

    # Find the points that are valid in all arrays
    # (use ~ for boolean negation; unary minus on bool arrays is invalid in modern numpy)
    validpoints = np.where(~(density.mask + times.mask))

    # Remove invalid points from all the arrays
    times = times[validpoints]
    Lstar = Lstar[validpoints]
    MLT = MLT[validpoints]
    MLAT = MLAT[validpoints]
    InvLat = InvLat[validpoints]
    density = density[validpoints]

    maxima = np.where(local_maxima(Lstar))[0]
    minima = np.where(local_maxima(-Lstar))[0]
    segmentbounds = np.insert(maxima, np.searchsorted(maxima, minima), minima)
    segmentbounds[-1] -= 1
    otimes = date2num(times)
    return times, Lstar, MLT, MLAT, InvLat, density, segmentbounds
def set_ticks(self, axis, start, stop, step, minor):
    """Sets ticks from start to stop with stepsize step on the axis.

    params:
        axis: an Axis instance on which the ticks should be set.
        start: the limit_min.
        stop: the limit_max.
        minor: True if minor ticks should be set, False if major ticks.
    """
    if step:
        if isinstance(step, datetime.timedelta):
            stop_date = mdates.num2date(stop)
            start_date = mdates.num2date(start)
            range_seconds = (stop_date - start_date).total_seconds()
            step_seconds = step.total_seconds()
            nr_intervals = int(math.ceil(float(range_seconds) / float(step_seconds)))
            ticks = [mdates.date2num(start_date + x * step)
                     for x in range(nr_intervals)]
        else:
            step = float(step)
            ticks = np.arange(math.ceil(start / step) * step, stop + step, step)
            if ticks[-1] > stop:
                ticks = ticks[:-1]
        if minor:
            major_ticks = set(axis.get_majorticklocs())
            minor_ticks = set(ticks)
            ticks = sorted(minor_ticks.difference(major_ticks))
        axis.set_ticks(ticks, minor)
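# Illustrative standalone version of the timedelta branch above (names here
# are assumptions, not from the source): place a tick every 6 hours across
# one day of matplotlib date numbers.
import datetime
import math
import matplotlib.dates as mdates

start = mdates.date2num(datetime.datetime(2021, 3, 1))
stop = mdates.date2num(datetime.datetime(2021, 3, 2))
step = datetime.timedelta(hours=6)
n = int(math.ceil((mdates.num2date(stop) - mdates.num2date(start)).total_seconds()
                  / step.total_seconds()))
ticks = [mdates.date2num(mdates.num2date(start) + x * step) for x in range(n)]
# ticks now holds date numbers for 00:00, 06:00, 12:00 and 18:00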
def find_LowHighTide_Amplitudes(time_array, wl_array, tau=12.42/24., prc=1./24., order=1,
                                plot=False, log=False, datetime_fmt='%d.%m.%Y %H:%M',
                                plot_title="", axeslabel_fontsize=18., title_fontsize=20.,
                                axesvalues_fontsize=18., annotation_fontsize=18.,
                                legend_fontsize=18.):
    """
    This script should be used with data which has no missing regions.
    Although it will work with all data, it may produce inaccuracies.
    Missing values should be represented by np.nan in wl_array.

    time_array - numpy array with datetime objects
    wl_array   - numpy array with measured values of waterlevel. Must have
                 the same length as time_array
    tau        - float, signal period in days
    prc        - precision value +- for comparing the time difference between
                 found extremums with the tidal cycle
    order      - integer for scipy.signal.argrelextrema()
    plot       - boolean flag to show plot
    log        - boolean flag to show log

    # tidal cycle is approximately 12h25min. Each timestep is 10 min => tidal cycle is 74.5 timesteps
    # therefore, maxima and minima will be filtered in a range of 73 to 76 timesteps from each other
    # for safety reasons let's take 720 min
    """
    if len(time_array) != len(wl_array):
        raise ValueError('time and waterlevel arrays should have equal length.\nGot: len(time)={0}, len(wl)={1}'.format(
            len(time_array), len(wl_array)))

    local_maximums = scipy.signal.argrelextrema(wl_array, np.greater_equal, order=order, mode='clip')[0]
    local_minimums = scipy.signal.argrelextrema(wl_array, np.less_equal, order=order, mode='clip')[0]

    local_maximums = remove_regions_from_extremums(local_maximums, log=log)
    local_minimums = remove_regions_from_extremums(local_minimums, log=log)

    errors_high = check_extremums_dt(local_maximums, time_array, tau=tau, prc=prc, log=log)
    errors_low = check_extremums_dt(local_minimums, time_array, tau=tau, prc=prc, log=log)

    if plot:
        with sns.axes_style("whitegrid"):
            plot_extremums(time_array, wl_array, local_minimums, local_maximums,
                           time_errors_high=errors_high, time_errors_low=errors_low,
                           date_xaxis=True, dt=[tau, prc], plot_title=plot_title,
                           axeslabel_fontsize=axeslabel_fontsize,
                           title_fontsize=title_fontsize,
                           axesvalues_fontsize=axesvalues_fontsize,
                           annotation_fontsize=annotation_fontsize,
                           legend_fontsize=legend_fontsize)

    #####################
    # now create lists for return....
    LOW_TIDE = list()
    for v in local_minimums:
        t = time_array[v]
        w = wl_array[v]
        DateTime = datetime.strftime(num2date(t), datetime_fmt)
        LOW_TIDE.append([DateTime, w])

    HIGH_TIDE = list()
    for v in local_maximums:
        t = time_array[v]
        w = wl_array[v]
        DateTime = datetime.strftime(num2date(t), datetime_fmt)
        HIGH_TIDE.append([DateTime, w])

    return LOW_TIDE, HIGH_TIDE
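# Hedged usage sketch (argument values are made up). Note that the body applies
# num2date() to time_array entries, so matplotlib date numbers are expected
# there despite the docstring; the defaults give the ~12.42 h M2 period with
# a 1 h tolerance.
# low, high = find_LowHighTide_Amplitudes(time_nums, wl_array, order=30, plot=False)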
def emolt_plotting(yrday, depth, temp, time11, samesites0, ax, k, ave_temp0, rgbcolors):
    # "ax": you can do fig = plt.figure(); ax = fig.add_subplot(111)
    # "k", "samesites0": this function should be called in a "for" loop,
    #     for k in range(len(samesites0)); except "k", all arguments should be lists
    # ave_temp0 holds the average temperature for each of the samesites
    # rgbcolors is a color box; we select colors from it for the plot
    temp0, yrday0 = [], []
    if temp != []:
        depth111s = min(depth)
        # sort temperature by date/time
        a = zip(yrday, temp)
        b = sorted(a, key=lambda a: a[0])
        for e in range(len(temp)):
            yrday0.append(b[e][0])
            temp0.append(b[e][1])
        plt.plot(yrday0, temp0, color=rgbcolors[k],
                 label=samesites0[k] + '(s): -' + str(int(depth111s)) + ',' + str(round(ave_temp0[k], 1)) + 'F',
                 lw=3)
        plt.ylabel('Temperature')
        plt.title('temp from ' + num2date(min(time11)).strftime("%d-%b-%Y") +
                  ' to ' + num2date(max(time11)).strftime("%d-%b-%Y"))
        plt.legend()
    # choose a suitable unit for the x axis
    if max(time11) - min(time11) < 5:
        monthsFmt = DateFormatter('%m-%d\n %H' + 'h')
    if 5 <= max(time11) - min(time11) < 366:
        monthsFmt = DateFormatter('%m-%d')
    if max(time11) - min(time11) > 366:
        monthsFmt = DateFormatter('%Y-%m')
    ax.xaxis.set_major_formatter(monthsFmt)
    #ax.set_xlabel(str(num2date(min(time11)).year)+"-"+str(num2date(max(time11)).year),fontsize=17)
    # limit x axis length
    ax.set_xlabel('Notation: (s) means near the surface of the sea')
    plt.xlim([min(time11), max(time11) + (max(time11) - min(time11)) / 2])
    plt.savefig('/net/home3/ocn/jmanning/py/huanxin/work/hx/please rename .png')
    plt.show()
def on_select_helper(self, xmin, xmax):
    """ Helper for on_select methods """

    # convert matplotlib float dates to a datetime format
    date_min = mdates.num2date(xmin)
    date_max = mdates.num2date(xmax)

    # put the xmin and xmax in datetime format to compare
    date_min = datetime.datetime(date_min.year, date_min.month, date_min.day,
                                 date_min.hour, date_min.minute)
    date_max = datetime.datetime(date_max.year, date_max.month, date_max.day,
                                 date_max.hour, date_max.minute)

    # find the indices that were selected
    indices = np.where((self.dates >= date_min) & (self.dates <= date_max))
    indices = indices[0]

    # get the selected dates and values
    selected_dates = self.dates[indices]
    selected_values = self.parameter["data"][indices]

    # compute simple stats on selected values
    selected_values_mean = nanmean(selected_values)
    selected_value_max = np.nanmax(selected_values)
    selected_value_min = np.nanmin(selected_values)

    return selected_dates, selected_values, selected_values_mean, selected_value_max, selected_value_min
def fix_trajectory_timezone(filename, folder_to_put, change_filename=True):
    '''
    Add timezone to the trajectory
    '''
    # http://www.saltycrane.com/blog/2009/05/converting-time-zones-datetime-objects-python/
    traj = read_compressed_trajectory(filename, with_timezone=False)
    tz = timezone('US/Pacific')
    for i in range(traj.shape[0]):
        d = num2date(traj[i, 0])
        d = d.replace(tzinfo=None)
        d = tz.localize(d)
        traj[i, 0] = date2num(d)
    result_filename = os.path.split(filename)[-1]
    if change_filename:
        file_date = num2date(date_str_to_num_converter_no_timezone(result_filename))
        file_date = file_date.replace(tzinfo=None)
        file_date = tz.localize(file_date)
        result_filename = num_to_date_str_converter(date2num(file_date), with_timezone=True)
    resulting_path = os.path.join(folder_to_put, result_filename)
    write_compressed_trajectory(traj, os.path.join(folder_to_put, result_filename), with_timezone=True)
    return resulting_path
def get_axis(ax, limit):
    xmin, xmax = limit
    ax.set_xlim([xmin, xmax])
    ax.grid(True)
    #ax.set_ylim([ts.value.min() * .85, 600])
    #ax.set_xlabel('time')
    majorLocator = dates.DayLocator()
    majorFormatter = dates.AutoDateFormatter(majorLocator)
    minorLocator = dates.HourLocator(interval=6)
    minorFormatter = dates.AutoDateFormatter(minorLocator)
    #ax.xaxis.set_major_locator(majorLocator)
    #ax.xaxis.set_major_formatter(majorFormatter)
    ax.xaxis.set_minor_locator(minorLocator)
    ax.xaxis.set_minor_formatter(minorFormatter)
    labels = ax.get_xminorticklabels()
    plt.setp(labels, rotation=30, fontsize='small')
    plt.setp(ax.get_xmajorticklabels(), rotation=30, fontsize='medium')
    xmin, xmax = ax.get_xlim()
    log.info(pformat({
        'xlim': [dates.num2date(xmin), dates.num2date(xmax)],
        'xticks': dates.num2date(ax.get_xticks()),
    }))
def mouseMoved(self, evt):
    pos = evt[0]  # using signal proxy turns original arguments into a tuple
    if self.sceneBoundingRect().contains(pos):
        mousePoint = self.plotItem.vb.mapSceneToView(pos)
        index = int(mousePoint.x())
        xLeft = self.candleData[0, 0]
        xRight = self.candleData[len(self.candleData) - 1, 0]
        if index > xLeft and index < xRight:
            #self.textInfo.setText('[%0.1f, %0.1f]' % (mousePoint.x(), mousePoint.y()))
            #self.textInfo.setHtml('<div style="text-align: center"><span style="color: #FFF;">This is the</span><br><span style="color: #FF0; font-size: 16pt;">[%0.1f, %0.1f]</span></div>' % (mousePoint.x(), mousePoint.y()))
            a = np.where(self.candleData[:, 0] == index)
            if len(a[0]) > 0:
                import matplotlib.dates as mpd
                import datetime as dt
                date = mpd.num2date(self.candleData[3, 0])
                strDate = dt.datetime.strftime(date, '%Y-%m-%d')
                self.textInfo.setHtml(
                    '<div style="text-align: center">'
                    '<span style="color: #FFF;">Current bar info:</span><br>'
                    '<span style="color: #FF0; font-size: 10pt;">'
                    'time:%s<br>open:%0.3f<br>high:%0.3f<br>low:%0.3f<br>close:%0.3f'
                    '</span></div>'
                    % (dt.datetime.strftime(mpd.num2date(self.candleData[a[0][0], 0]), '%Y-%m-%d'),
                       self.candleData[a[0][0], 1],
                       self.candleData[a[0][0], 2],
                       self.candleData[a[0][0], 3],
                       self.candleData[a[0][0], 4]))
            #date = np.array([mpd.date2num(dt.datetime.strptime(dateStr, '%Y-%m-%d'))])

            # 0) get environment
            rect = self.sceneBoundingRect()
            top = rect.top()
            left = rect.left()
            bottom = rect.bottom()
            width = rect.width()
            xAxis = mousePoint.x()
            yAxis = mousePoint.y()

            # 1) set positions
            self.vLine.setPos(xAxis)
            self.hLine.setPos(yAxis)
            self.textInfo.setPos(xAxis, yAxis)
def giant_timeseries(ts):
    fig = Figure((20.3, 3.5), 300)
    canvas = FigureCanvas(fig)
    ax = fig.add_subplot(111)
    preferspan = ax.axhspan(SAFE[0], SAFE[1],
                            facecolor='g', alpha=0.35,
                            edgecolor='#003333', linewidth=1)
    # visualize glucose using stems
    # XXX: gets a list of days.
    timestamps = glucose.get_days(ts.time)
    delta = dates.relativedelta(days=1, hours=12)
    oneday = dates.relativedelta(days=1)
    xmin, xmax = (timestamps[0], timestamps[-1])
    ax.set_xlim([xmin, xmax])
    markers, stems, baselines = ax.stem(ts.time, ts.value, linefmt='b:')
    plt.setp(markers, color='red', linewidth=.5, marker='o')
    plt.setp(baselines, marker='None')
    fig.autofmt_xdate()
    ax.set_title('glucose history')
    ax.grid(True)
    ax.set_xlabel('time')
    majorLocator = dates.DayLocator()
    majorFormatter = dates.AutoDateFormatter(majorLocator)
    minorLocator = dates.HourLocator(interval=6)
    minorFormatter = dates.AutoDateFormatter(minorLocator)
    ax.xaxis.set_major_locator(majorLocator)
    ax.xaxis.set_major_formatter(majorFormatter)
    ax.xaxis.set_minor_locator(minorLocator)
    ax.xaxis.set_minor_formatter(minorFormatter)
    labels = ax.get_xminorticklabels()
    plt.setp(labels, rotation=30, fontsize='small')
    plt.setp(ax.get_xmajorticklabels(), rotation=30, fontsize='medium')
    xmin, xmax = ax.get_xlim()
    log.info(pformat({
        'xlim': [dates.num2date(xmin), dates.num2date(xmax)],
    }))
    ax.set_ylabel('glucose mm/dL')
    return canvas
def _update(self, x):
    x = self._limit(x)
    if x < self.start:
        status = (x, self.start)
    else:
        status = (self.start, x)
    self.parent.set_message(num2date(status[0]).strftime("%c, %fus") + " - " +
                            num2date(status[1]).strftime("%c, %fus"))
    self.parent.set_selection(status)
def on_release(self, event):
    if event.xdata is not None:
        self.t2_sel = event.xdata
        t1fmt = dt.datetime.strftime(mdates.num2date(self.t1_sel), "%d-%b-%Y-%H:%M:%S")
        t2fmt = dt.datetime.strftime(mdates.num2date(self.t2_sel), "%d-%b-%Y-%H:%M:%S")
        logmsg = "Selected the region: t1 = " + str(t1fmt) + ", t2 = " + str(t2fmt)
        pub.sendMessage("logger", logmsg)
        self.set_select_mode_off()
def srplot(self, contact=0):
    from matplotlib import dates
    from collections import Counter
    import copy
    import pylab as pl

    if contact != 0:
        database, name = findcontact(self)
    else:
        database = copy.deepcopy(self[1:])
        name = 'all contacts'

    senttime, sentcount = [], []
    rectime, reccount = [], []
    for i in range(1, len(database)):
        if database[i]["sr"] == "Sent":
            senttime.append(int(database[i]["Datetime"]))
        if database[i]["sr"] == "Received":
            rectime.append(int(database[i]["Datetime"]))

    senttimedict = sorted(Counter(senttime).items())
    rectimedict = sorted(Counter(rectime).items())

    senttime, sentcount = [], []
    rectime, reccount = [], []
    for i in range(len(senttimedict)):
        senttime.append(senttimedict[i][0])
        sentcount.append(senttimedict[i][1])
    for i in range(len(rectimedict)):
        rectime.append(rectimedict[i][0])
        reccount.append(rectimedict[i][1])

    datesent = dates.num2date(senttime)
    daterec = dates.num2date(rectime)

    fig = pl.figure()
    ax = fig.add_subplot(111)
    ax.plot(datesent, sentcount, color='c', label="Sent")
    ax.autoscale_view()
    ax.grid(True)
    pl.ylabel("Messages per day")
    fig.autofmt_xdate()
    ax.plot(daterec, reccount, color='m', label="Received")
    ax.autoscale_view()
    ax.grid(True)
    pl.legend()
    pl.title('Texts sent from ' + name)
    pl.show()
def mouseDoubleClickEvent(self, event):
    if not self.button_vcursor.isChecked():
        vals = []
        result = []
        inv = self.ax.transData.inverted()
        inv2 = self.ax2.transData.inverted()
        try:
            [time_ax, val_ax] = inv.transform((event.x(), self.frameSize().height() - event.y()))
        except IndexError:
            [time_ax, val_ax] = transformCoord2Log(
                (event.x(), self.frameSize().height() - event.y()), self.ax, self.ax2)
        t0, tmax = self.ax.get_xlim()
        for ax in self.fig.get_axes():
            for lines in ax.get_lines():
                if self.isinDict(lines):
                    ind_t0 = indFinder(t0, lines.get_xdata())
                    ind_tmax = indFinder(tmax, lines.get_xdata())
                    # integer division so range() gets an int step count
                    step = 1 + (ind_tmax - ind_t0) // 400
                    ind = indFinder(time_ax, lines.get_xdata())
                    for i in range(ind - step, ind + step):
                        if i >= 0 and i < len(lines.get_xdata()):
                            try:
                                new_coord = ax.transData.transform(
                                    (lines.get_xdata()[i], lines.get_ydata()[i]))
                            except TypeError:
                                new_coord = transformCoord2Log(
                                    (lines.get_xdata()[i], lines.get_ydata()[i]),
                                    self.ax, self.ax2, inv=True)
                            if new_coord is not None:
                                vals.append(np.sqrt((new_coord[0] - event.x()) ** 2 +
                                                    (new_coord[1] - (self.frameSize().height() - event.y())) ** 2))
                                result.append([lines.get_xdata()[i], lines.get_ydata()[i],
                                               ax, lines.get_label()])
        if result:
            label_point = QLabel(self)
            label_point.setWordWrap(True)
            point = result[np.argmin(vals)]
            txt = "%s \r\n" % point[3]
            if point[2].get_yscale() == "log":
                txt += "%s \r\n % 0.3e" % (num2date(point[0]).strftime("%d/%m/%Y %H:%M:%S"), point[1])
            else:
                txt += "%s \r\n % 0.2f" % (num2date(point[0]).strftime("%d/%m/%Y %H:%M:%S"), point[1])
            if label_point.width() + event.x() > self.frameSize().width():
                label_point.move(event.x() - label_point.width(), event.y() - 16)
            else:
                label_point.move(event.x(), event.y() - 16)
            line, = point[2].plot(point[0], point[1], 'o', color='k', markersize=4.)
            self.fig.canvas.restore_region(self.background)
            for ax in self.fig.get_axes():
                for lines in ax.get_lines():
                    ax.draw_artist(lines)
            self.fig.canvas.blit(self.fig.bbox)
            label_point.setText(txt)
            label_point.show()
            timer = QTimer.singleShot(10000, partial(self.hidePoint, line, label_point, point[2]))
def date2yd(datetime_nums):
    "convert date to yearday"
    # input must be a list of numbers
    yearday = []
    for datetime_num in datetime_nums:
        year_day = num2date(datetime_num).strftime('%j')
        year_time = float(num2date(datetime_num).hour +
                          (float(num2date(datetime_num).minute) / float(60)) +
                          (float(num2date(datetime_num).second) / float(3600))) / float(24)
        yearday.append(float(year_day) + year_time)
    return yearday
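# Quick check (illustrative, not from the source): noon on 2 January should
# come out as yearday 2.5.
from datetime import datetime
from matplotlib.dates import date2num

print(date2yd([date2num(datetime(2015, 1, 2, 12, 0))]))  # -> [2.5]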
def _update(self, x):
    x = self._limit(x)
    if self.which:
        boundl = self.other
        boundr = x - self.offset
    else:
        boundl = x - self.offset
        boundr = self.other
    self.parent.set_message(num2date(boundl).strftime("%c, %fus") + " - " +
                            num2date(boundr).strftime("%c, %fus"))
    self.parent.model.update_annotation(self.id, boundl, boundr)
def getemolt_uv(site, input_time, dep):
    """
    get data from url, return datetime, u, v, depth
    input_time can either contain two values: start_time & end_time
    OR one value: interval_days
    """
    url = "http://gisweb.wh.whoi.edu:8080/dods/whoi/emolt_sensor?emolt_sensor.SITE,emolt_sensor.YRDAY0_LOCAL,emolt_sensor.TIME_LOCAL,emolt_sensor.TEMP,emolt_sensor.DEPTH_I,emolt_sensor.U,emolt_sensor.V&emolt_sensor.SITE="

    # get the emolt_sensor data
    dataset = get_dataset(url + '"' + site + '"')
    var = dataset["emolt_sensor"]

    print("Making lists of mooring data")
    u = list(var.U)
    v = list(var.V)
    depth = list(var.DEPTH_I)
    time0 = list(var.YRDAY0_LOCAL)
    year_month_day = list(var.TIME_LOCAL)

    print("Generating a datetime for mooring data")
    date_time, date_time_time = [], []
    for i in scipy.arange(len(time0)):
        date_time_time.append(
            num2date(time0[i])
            .replace(year=time.strptime(year_month_day[i], "%Y-%m-%d").tm_year)
            .replace(day=time.strptime(year_month_day[i], "%Y-%m-%d").tm_mday)
        )
        date_time.append(date2num(date_time_time[i]))  # +float(4)/24 makes it UTC

    # get the index of sorted date_time
    print("Sorting mooring data by time")
    index = list(range(len(date_time)))
    index.sort(key=lambda i: date_time[i])

    # reorder the lists of date_time, u, v
    date_time_num = [date_time[i] for i in index]
    u = [u[i] for i in index]
    v = [v[i] for i in index]
    depth = [depth[i] for i in index]

    print("Delimiting mooring data according to user-specified time")
    part_v, part_u, part_time = [], [], []
    if len(input_time) == 2:
        start_time = input_time[0]
        end_time = input_time[1]
    if len(input_time) == 1:
        start_time = date_time_num[0]
        end_time = start_time + input_time[0]
    print(date_time_num[0], date_time_num[-1])
    for i in range(0, len(u)):
        if (start_time <= date_time_num[i] <= end_time) & (depth[i] == dep):
            part_v.append(v[i] / 100)
            part_u.append(u[i] / 100)
            part_time.append(num2date(date_time_num[i]))
    u = part_u
    v = part_v
    return part_time, u, v, depth, start_time, end_time
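# Hypothetical call (site code and depth are made up): pass either a
# [start_num, end_num] pair of matplotlib date numbers, or a single-element
# [interval_days] list to take interval_days from the first record.
# part_time, u, v, depth, t0, t1 = getemolt_uv('BN01', [5.0], dep=20)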
def num2ymd(T, t0=None, **kwargs):
    """
    Converts matplotlib time to a year-month-day array format.

    Parameters
    ----------
    T : array_like
        Array of matplotlib time.
    t0 : float, datetime.date, datetime.datetime, optional
        Reference date to calculate Julian day. If not set, calculates
        Julian day using the first of January for each year.

    Returns
    -------
    YMD : array
        Two-dimensional array with columns indicating respectively
        0--year, 1--month, 2--day, 3--hour, 4--minute, 5--second,
        6--Julian day, 7--ISO week number, and 8--season. Season is
        given as a number from 1 to 4 indicating respectively winter,
        spring, summer and fall.

    See also
    --------
    season

    """
    if t0 is None:
        _T0 = dates.datetime.date(year=1, month=1, day=1)
    elif isinstance(t0, float) | isinstance(t0, int):
        _T0 = dates.num2date(t0)
        _t0 = t0 - 1  # Makes sure Julian day starts at 1.
    elif (isinstance(t0, dates.datetime.date) |
          isinstance(t0, dates.datetime.datetime)):
        _T0 = t0
        _t0 = dates.date2num(_T0) - 1  # Makes sure Julian day starts at 1.

    # Checks whether `t0` is an integer. This will be used later to decide
    # if Julian day will be returned as an integer.
    is_int = isinstance(t0, int)

    Time = []
    for t in T:
        # Converts matplotlib number to datetime object.
        day = dates.num2date(t)
        if is_int:
            t = int(t)
        # Checks if _T0.year is the same as the current year for Julian day
        # calculation.
        if (t0 is None) & (_T0.year != day.year):
            _T0 = dates.datetime.date(year=day.year, month=1, day=1)
            _t0 = dates.date2num(_T0) - 1  # Makes sure Julian day starts at 1.
        # Appends current date and time values to output array.
        Time.append([day.year, day.month, day.day, day.hour, day.minute,
                     day.second, t - _t0, day.isocalendar()[1],
                     season(t, **kwargs)])

    return asarray(Time)
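# Illustrative call (not from the source); assumes the module-level season()
# helper referenced in the docstring is importable alongside num2ymd.
from matplotlib import dates
import datetime

T = [dates.date2num(datetime.datetime(2014, 7, 1, 6, 30)),
     dates.date2num(datetime.datetime(2014, 12, 31, 23, 0))]
ymd = num2ymd(T)
# each row: [year, month, day, hour, minute, second, julian_day, iso_week, season]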
def prediction(crit, high_slope, low_slope, end_date):  # no longer in use
    est_period = int((math.fabs(crit[0][0] - crit[1][0]) +
                      math.fabs(crit[2][0] - crit[3][0])) / int(2))
    sort_crit = sorted(crit, key=lambda data: data[0], reverse=True)
    message = ""
    if sort_crit[0][2] == True:
        message += "is on the fall, the last peak was at " + str(num2date(sort_crit[0][0]))
        # message += "\nIn %d days." % ((date2num(end_date)))
    else:
        message += "is on the rise, the last low was at " + str(num2date(sort_crit[0][0]))[:-15]
        # message += "\nIn %d days." % ((date2num(end_date)))
    return message
def get_days_and_nights(extracted_data, full_nights, full_days):
    # make the number of full nights equal the number of full days
    while len(full_days) != len(full_nights):
        if len(full_days) > len(full_nights):
            del full_days[-1]
        else:
            del full_nights[-1]
    start = full_nights[0][0] if full_nights[0][0] < full_days[0][0] else full_days[0][0]
    end = full_nights[-1][1] if full_nights[-1][1] > full_days[-1][1] else full_days[-1][1]
    return extract_times(extracted_data, md.num2date(start), md.num2date(end))
def plot_messages_time(conversation,
                       chart_types={"stack plot": False, "stacked bar": False,
                                    "grouped bar": False, "line plot": False},
                       bin_size=7, group_messages=False,
                       start_date=mdates.num2date(730120),
                       end_date=mdates.num2date(1000000)):
    """ Expects a WhatsApp Chat log as a list item, and displays a graph of
    messages sent/received over time """
    print("Formatting charts")

    # Set graph style
    # https://matplotlib.org/gallery/style_sheets/style_sheets_reference.html
    plt.style.use("bmh")

    # Determine what data we need
    if chart_types["stack plot"]:
        cumulative_data = True
    else:
        cumulative_data = False
    if chart_types["stacked bar"] or chart_types["grouped bar"] or chart_types["line plot"]:
        non_cumulative_data = True
    else:
        non_cumulative_data = False

    # Get x chart data
    # Get cumulative time data
    if cumulative_data == True:
        print("Collating cumulative data")
        x_data_cum, tally_cum = collate_data(conversation, cumulative=True,
                                             group_messages=group_messages,
                                             start_date=start_date,
                                             end_date=end_date)  # Time data
        # Create alias (later we need to reference any available data)
        x_data_alias = x_data_cum
        tally_alias = tally_cum
        print("Cumulative data successfully collated")

    # Get non-cumulative time data
    if non_cumulative_data == True:
        print("Collating non-cumulative data")
        x_data_noncum, tally_noncum = collate_data(conversation, cumulative=False,
                                                   bin_size=bin_size,
                                                   group_messages=group_messages,
                                                   start_date=start_date,
                                                   end_date=end_date)
        # Time data (may be slightly different to cumulative data due to bin sizes)
        # Create alias
        x_data_alias = x_data_noncum
        tally_alias = tally_noncum
        print("Non-cumulative data successfully collated")

    # Get participants that have sent messages
    # (if using a set time period, some people may not have sent messages)
    participants = []
    for person in tally_alias:
        if not all(x == 0 for x in tally_alias[person]):
            # If this person has any messages in this time period:
            participants.append(person)
    num_participants = len(participants)

    # Process tally data to y chart data
    if cumulative_data == True:
        # convert tally data from dict to lists (order matters, so we use a set list to loop, ie. participants)
        y_data_cum = [tally_cum[person] for person in participants]
    if non_cumulative_data == True:
        y_data_noncum = [tally_noncum[person] for person in participants]

    # Count how many subplots we need
    subplot_cnt = 0
    for key in chart_types:
        if chart_types[key]:
            subplot_cnt += 1

    # Create figure and a subplot for each graph
    if subplot_cnt > 1:  # if we want more than 1 subplot
        fig, axs = plt.subplots(nrows=subplot_cnt, ncols=1,
                                figsize=(20, 6 * subplot_cnt))  # 6 inch height per subplot
    else:
        fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(20, 6))
        axs = [ax]  # The following code loops through axs a lot, so we need to create an iterable: axs

    # Set x axis labels
    chart_time_span = x_data_alias[-1] - x_data_alias[0]  # Get chart time span in days
    if chart_time_span > 1825:  # > 5 years
        major_tick = mdates.YearLocator()  # Set major ticks to year
        x_axis_format = mdates.DateFormatter("%Y")  # Label ticks eg. "2020"
        minor_tick = mdates.MonthLocator()  # minor ticks to month
    elif chart_time_span > 365:  # > 1 year
        major_tick = mdates.MonthLocator()  # major ticks to months
        x_axis_format = mdates.DateFormatter("%b '%y")  # Label ticks eg. "Sep '19"
        minor_tick = mdates.WeekdayLocator()  # minor ticks to week
    else:  # < 1 year
        major_tick = mdates.WeekdayLocator()  # major ticks to week
        x_axis_format = mdates.DateFormatter("%d %b")  # label tick eg. "18 Jun"
        minor_tick = mdates.DayLocator()  # minor ticks to day

    # Format ticks on graphs
    for ax in axs:
        ax.xaxis.set_major_locator(major_tick)  # Either major: years, minor: months, or major: months, minor: weekdays
        ax.xaxis.set_major_formatter(x_axis_format)
        ax.xaxis.set_minor_locator(minor_tick)

    # Set colours of graphs
    for ax in axs:
        ax.set_facecolor("#242424")  # Background colour
        ax.grid(color="#eeeeee")  # Grid colour
        ax.tick_params(axis='x', which='minor', colors='#eeeeee')  # set minor tick colour
        ax.tick_params(bottom=True, left=False, color="#ffffff", length=5, width=1.3)  # Set little white markers for x-axis ticks

    # Rainbow colours for graph lines
    colours = ["#e96841", "#ed9a4a", "#f0c054", "#f4ef5f", "#c5d966", "#92cb6a",
               "#44bb6b", "#2fc0b9", "#1cc5ec", "#4398d1", "#5577c1", "#6153a8",
               "#9158a7", "#b75fab", "#e566ab", "#e16378"]
    # Run colour list through an algorithm to provide better contrast
    colours = colour_selection(colours, num_participants)  # comment this line out if you change the colours above

    bin_size_translation = {1: "day", 7: "week", 14: "2 weeks", 21: "3 weeks",
                            28: "4 weeks", 30: "month", 180: "6 months"}
    if bin_size in bin_size_translation:
        bin_label = bin_size_translation[bin_size]
    else:
        bin_label = f"{bin_size} days"

    # Plot the data on the subplots
    i = 0  # count how many graphs we've plotted
    if chart_types["stack plot"]:
        print("Plotting stack plot...", end="")
        axs[i].stackplot(x_data_cum, y_data_cum, labels=participants, colors=colours)
        handles, labels = axs[0].get_legend_handles_labels()
        axs[i].legend(reversed(handles), reversed(labels), loc='upper left')
        axs[i].set_ylabel('Cumulative Messages Sent')
        i += 1
        print(" done!")

    if chart_types["stacked bar"]:
        print("Plotting stacked bar chart...", end="")
        # Calculate best bar width
        num_bars = (x_data_noncum[-1] - x_data_noncum[0]) / bin_size  # Determine how many bars we're plotting
        x_units = x_data_noncum[-1] - x_data_noncum[0]  # number of units on the x axis
        width = x_units / num_bars * 0.8  # (set bars at 80% width to add spacing)
        # Offset bars
        x_indexes = np.asarray(x_data_noncum)  # To be able to offset bars (by width) with matplotlib, data must be an nparray
        x_offset = -width / 2  # offset so the right edge of the bar lines up with the day value (better for large bin sizes)
        bottom_list = [[0] * len(x_data_noncum)]  # Create a list of values for the base of each bar (starting with an array of zeroes)
        for j in range(1, num_participants):
            # Find starting height for bars by adding previous person's data to previous starting point
            temp = np.add(y_data_noncum[j - 1], bottom_list[-1]).tolist()
            bottom_list.append(temp)
        # Plot data
        for j in range(0, num_participants):  # Each chat participant is plotted
            axs[i].bar(x_indexes + x_offset, y_data_noncum[j], bottom=bottom_list[j],
                       width=width, label=participants[j], color=colours[j % len(colours)])
        if i == 0:
            axs[i].legend()
        axs[i].set_ylabel(f'Messages Sent [per {bin_label}]')
        i += 1
        print(" done!")

    if chart_types["line plot"]:
        print("Plotting line plot...", end="")
        for j in range(0, num_participants):  # We have to plot each chat participant separately
            axs[i].plot(x_data_noncum, y_data_noncum[j], linewidth=1,
                        label=participants[j], color=colours[j % len(colours)])
        if i == 0:
            axs[i].legend()
        axs[i].set_ylabel(f'Messages Sent [per {bin_label}]')
        i += 1
        print(" done!")

    if chart_types["grouped bar"]:
        print("Plotting grouped bar chart...", end="")
        num_bars = ((x_data_noncum[-1] - x_data_noncum[0]) / bin_size) * num_participants  # number of bars to display
        x_units = x_data_noncum[-1] - x_data_noncum[0]  # number of units on the x axis
        width = x_units / num_bars * 0.70  # (set bars at 70% width to add spacing)
        x_indexes = np.asarray(x_data_noncum)  # To be able to offset bars (by width) with matplotlib, data must be an nparray
        for j in range(0, num_participants):  # Each chart participant is plotted
            x_offset = +(0.5 * width) - (width * num_participants) + width * j  # Right side of right-most bar lines up with date value (better for large bin sizes)
            #x_offset = -(width * num_participants)/2 + width*j  # Bars are centred around date value (with large bin size, may add excess space to the right of chart)
            axs[i].bar(x_indexes + x_offset, y_data_noncum[j], width=width,
                       label=participants[j], color=colours[j % len(colours)])
        if i == 0:  # We only need 1 legend
            axs[i].legend()
        axs[i].set_ylabel(f'Messages Sent [per {bin_label}]')
        i += 1
        print(" done!")

    print("All charts plotted successfully!")

    # Find max x value of graph
    if non_cumulative_data == True:
        # Because bin_size might be >1, the time axis for non-cumulative data may have a higher max
        min_time = x_data_noncum[0]
        # We can't simply take the last x_data value for max_time as a large bin_size may have added excess
        if end_date == mdates.num2date(1000000):  # default end date
            max_time = mdates.date2num(conversation.message_log[-1]["date"])  # Last message date
        else:
            max_time = mdates.date2num(end_date)
        #if chart_types["grouped bar"]:  # Depending on bar offset, we might want to add some space to the right of the chart
        #    max_time = max_time + bin_size/2
    elif cumulative_data == True:
        min_time = x_data_cum[0]
        max_time = x_data_cum[-1]

    # Trim graphs
    for ax in axs:
        ax.set_xlim(min_time, max_time)
        ax.set_ylim(0)

    fig.suptitle(f"{conversation.title}", fontsize=15)
    plt.xlabel("Date")

    # Set tick label rotation of each axes
    for ax in fig.axes:
        plt.sca(ax)  # set current axes to ax
        plt.xticks(rotation=90)  # Set rotation

    plt.subplots_adjust(left=0.06, bottom=0.11, right=0.97, top=0.94,
                        wspace=0.13, hspace=0.17)  # Optimise white space around plots

    print("Displaying charts")
    plt.show()
    data_tuples.append(row)

# Plot (bar chart)
# Datatypes of the returned data: column 1 (col1) --> integer, column 2 (date) --> string
datatypes = [('col1', 'i4'), ('date', 'S20')]

# Data tuples and datatypes
data = np.array(data_tuples, dtype=datatypes)
col1 = data['col1']

# Converts date to a manageable date-format for matplotlib
dates = mdates.num2date(mdates.datestr2num(data['date']))

fig, ax1 = plt.subplots()

# Plot the data (x-axis=dates, y-axis=col1)
ax1.plot(dates, col1, linewidth=2, color='#2dd700')

# Place a gray dashed grid behind the ticks (only for the y-axis)
ax1.yaxis.grid(color='gray', linestyle='dashed')
# Set this grid behind the ticks
ax1.set_axisbelow(True)

# Rotate x-labels on the x-axis
fig.autofmt_xdate()

# Label x- and y-axis
def plot_monitoring(y_true, y_pred, timestamp=None, interval='month',
                    metrics=None, classification=False, **kwargs):
    """Plots model performance over a timestamp array which represents the
    date or timestamp of the prediction.

    If timestamp or interval is None, it just computes the metrics on all
    the predictions. If interval is not None it can be one of the following:
    'year', 'month', 'day' or 'hour'.

    - 'year'  : format '%Y'
    - 'month' : format '%Y-%m'
    - 'day'   : format '%Y-%m-%d'
    - 'hour'  : format '%Y-%m-%d-%r'

    If it's for a classification and you're using y_pred as probabilities,
    don't forget to pass the classification=True argument!

    You can use the metrics of your choosing; for that, refer to the
    `evaluation metrics`_ documentation.

    .. _evaluation metrics: #

    Parameters
    ----------
    y_true: array like
        True labels
    y_pred: array like (1D or 2D)
        if 1D array, predicted labels;
        if 2D array, probabilities (returns of a predict_proba function)
    timestamp: array like or None (default None)
        Array of datetime when the prediction occurred
    interval: str or None (default 'month')
        interval to format the timestamp with
    metrics: list (default None)
        List of metrics to compute
    classification: bool (default True)
        Whether the ML task is a classification or not
    """
    scores = monitoring.monitor_model(y_true, y_pred, timestamp, interval,
                                      metrics, classification)
    scores = scores[['count'] + scores.columns.values.tolist()[:-1]]

    n_rows = int(len(scores.columns) / 2) + 1
    fig = plt.figure(figsize=(15, 5 * n_rows))
    gs = fig.add_gridspec(n_rows, 2, hspace=0.3)

    dates = mdates.num2date(mdates.datestr2num(scores.index))
    fig.autofmt_xdate()

    for i, (name, score) in enumerate(scores.items()):
        ax = fig.add_subplot(gs[int(i / 2), i % 2])
        if name == 'count':
            plt.bar(dates, score, width=7)
        else:
            plt.plot(dates, score)
        ymin, ymax = ax.get_ylim()
        ax.set_ylim((ymin * 0.9, ymax * 1.1))
        plt.setp(ax.xaxis.get_majorticklabels(), rotation=18)
        ax.set_title(name)

    fig.suptitle('Model performance', fontsize=16)
    # plt.show()
    return plots.plot_or_figure(fig, **kwargs)
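# Hypothetical call, assuming the package's monitoring and plots modules are
# importable as used in the body; ts holds one datetime string per prediction.
# fig = plot_monitoring(y_true, y_pred, timestamp=ts, interval='day',
#                       metrics=['accuracy'], classification=True)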
def plot_echogram(V, plot_start_day, plot_range_day, plot_param,
                  fig_size=(16, 7), cmap_name='viridis'):
    x_ticks_spacing = plot_param["x_ticks_spacing"]  # spacing: in num of days
    y_ticks_num = plot_param["y_ticks_num"]
    y_start_idx = plot_param["y_start_idx"]
    y_end_idx = plot_param["y_end_idx"]
    y_offset_idx = plot_param["y_offset_idx"]
    c_min = plot_param["c_min"]
    c_max = plot_param["c_max"]
    c_ticks_spacing = plot_param["c_ticks_spacing"]
    ping_per_day_mvbs = plot_param["ping_per_day_mvbs"]
    depth_bin_size = plot_param["depth_bin_size"]
    ping_time = plot_param["ping_time"]

    v_mtx = V[:, y_start_idx:(V.shape[1] + y_end_idx),
              ping_per_day_mvbs * (plot_start_day - 1) +
              np.arange(ping_per_day_mvbs * plot_range_day)]

    y_ticks_spacing = np.floor(v_mtx.shape[1] / (y_ticks_num - 1)).astype(int)
    y_ticks = np.arange(0, v_mtx.shape[1], y_ticks_spacing)
    y_ticklabels = y_ticks * depth_bin_size + (y_start_idx + y_offset_idx) * depth_bin_size

    x_ticks = np.arange(0, plot_range_day, x_ticks_spacing) * ping_per_day_mvbs
    x_ticks_in_ping_time = np.arange(plot_start_day - 1,
                                     plot_start_day - 1 + plot_range_day,
                                     x_ticks_spacing) * ping_per_day_mvbs
    x_ticklabels = [num2date(xx).strftime('%m/%d')
                    for xx in ping_time[x_ticks_in_ping_time]]
    #x_ticklabels = [num2date(xx).strftime('%m/%d') for xx in ping_time[x_ticks[1:]]]
    #x_ticklabels.insert(0, num2date(ping_time[x_ticks[0]]).strftime('%b-%d'))

    c_ticks = np.arange(c_min, c_max + c_ticks_spacing, c_ticks_spacing)

    fig, ax = plt.subplots(3, 1, figsize=fig_size, sharex=True)
    for iX in range(3):
        im = ax[iX].imshow(v_mtx[iX, ::-1, :], aspect='auto',
                           vmax=c_max, vmin=c_min, cmap=cmap_name)
        divider = make_axes_locatable(ax[iX])
        cax = divider.append_axes("right", size="1%", pad=0.1)
        cbar = plt.colorbar(im, cax=cax, ticks=c_ticks)
        ax[iX].set_yticks(y_ticks)
        ax[iX].set_yticklabels(y_ticklabels, fontsize=14)
        ax[iX].set_ylabel('Depth (m)', fontsize=16)
        if iX == 2:
            ax[iX].set_xticks(x_ticks)
            ax[iX].set_xticklabels(x_ticklabels, fontsize=14)
            ax[iX].set_xlabel('Date', fontsize=16)
        #if iX == 0:
        #    ax[iX].set_title('38 kHz', fontsize=14)
        #elif iX == 1:
        #    ax[iX].set_title('120 kHz', fontsize=14)
        #else:
        #    ax[iX].set_title('200 kHz', fontsize=14)
        if plot_range_day <= 20:  # if short time, plot day separator
            for dd in range(1, plot_range_day):
                ax[iX].plot(np.array((dd, dd)) * ping_per_day_mvbs,
                            (0, v_mtx.shape[1]), '--', color=(0.8, 0.8, 0.8))
    plt.tight_layout(h_pad=0.1)
def test_auto_date_locator():
    def _create_auto_date_locator(date1, date2):
        locator = mdates.AutoDateLocator()
        locator.create_dummy_axis()
        locator.set_view_interval(mdates.date2num(date1),
                                  mdates.date2num(date2))
        return locator

    d1 = datetime.datetime(1990, 1, 1)
    results = (
        [datetime.timedelta(weeks=52 * 200),
         ['1990-01-01 00:00:00+00:00', '2010-01-01 00:00:00+00:00',
          '2030-01-01 00:00:00+00:00', '2050-01-01 00:00:00+00:00',
          '2070-01-01 00:00:00+00:00', '2090-01-01 00:00:00+00:00',
          '2110-01-01 00:00:00+00:00', '2130-01-01 00:00:00+00:00',
          '2150-01-01 00:00:00+00:00', '2170-01-01 00:00:00+00:00']
         ],
        [datetime.timedelta(weeks=52),
         ['1990-01-01 00:00:00+00:00', '1990-02-01 00:00:00+00:00',
          '1990-03-01 00:00:00+00:00', '1990-04-01 00:00:00+00:00',
          '1990-05-01 00:00:00+00:00', '1990-06-01 00:00:00+00:00',
          '1990-07-01 00:00:00+00:00', '1990-08-01 00:00:00+00:00',
          '1990-09-01 00:00:00+00:00', '1990-10-01 00:00:00+00:00',
          '1990-11-01 00:00:00+00:00', '1990-12-01 00:00:00+00:00']
         ],
        [datetime.timedelta(days=141),
         ['1990-01-05 00:00:00+00:00', '1990-01-26 00:00:00+00:00',
          '1990-02-16 00:00:00+00:00', '1990-03-09 00:00:00+00:00',
          '1990-03-30 00:00:00+00:00', '1990-04-20 00:00:00+00:00',
          '1990-05-11 00:00:00+00:00']
         ],
        [datetime.timedelta(days=40),
         ['1990-01-03 00:00:00+00:00', '1990-01-10 00:00:00+00:00',
          '1990-01-17 00:00:00+00:00', '1990-01-24 00:00:00+00:00',
          '1990-01-31 00:00:00+00:00', '1990-02-07 00:00:00+00:00']
         ],
        [datetime.timedelta(hours=40),
         ['1990-01-01 00:00:00+00:00', '1990-01-01 04:00:00+00:00',
          '1990-01-01 08:00:00+00:00', '1990-01-01 12:00:00+00:00',
          '1990-01-01 16:00:00+00:00', '1990-01-01 20:00:00+00:00',
          '1990-01-02 00:00:00+00:00', '1990-01-02 04:00:00+00:00',
          '1990-01-02 08:00:00+00:00', '1990-01-02 12:00:00+00:00',
          '1990-01-02 16:00:00+00:00']
         ],
        [datetime.timedelta(minutes=20),
         ['1990-01-01 00:00:00+00:00', '1990-01-01 00:05:00+00:00',
          '1990-01-01 00:10:00+00:00', '1990-01-01 00:15:00+00:00',
          '1990-01-01 00:20:00+00:00']
         ],
        [datetime.timedelta(seconds=40),
         ['1990-01-01 00:00:00+00:00', '1990-01-01 00:00:05+00:00',
          '1990-01-01 00:00:10+00:00', '1990-01-01 00:00:15+00:00',
          '1990-01-01 00:00:20+00:00', '1990-01-01 00:00:25+00:00',
          '1990-01-01 00:00:30+00:00', '1990-01-01 00:00:35+00:00',
          '1990-01-01 00:00:40+00:00']
         ],
        [datetime.timedelta(microseconds=1500),
         ['1989-12-31 23:59:59.999500+00:00', '1990-01-01 00:00:00+00:00',
          '1990-01-01 00:00:00.000500+00:00', '1990-01-01 00:00:00.001000+00:00',
          '1990-01-01 00:00:00.001500+00:00']
         ],
    )

    for t_delta, expected in results:
        d2 = d1 + t_delta
        locator = _create_auto_date_locator(d1, d2)
        assert list(map(str, mdates.num2date(locator()))) == expected
#%% Read RTOFS grid and time
print('Retrieving coordinates from RTOFS')
ncRTOFS = xr.open_dataset(nc_files_RTOFS[0])
latRTOFS = ncRTOFS.Latitude[:]
lonRTOFS = ncRTOFS.Longitude[:]
depthRTOFS = ncRTOFS.Depth[:]

#for t in np.arange(len(nc_files_RTOFS)):
tRTOFS = []
for t in np.arange(2):
    ncRTOFS = xr.open_dataset(nc_files_RTOFS[t])
    tRTOFS.append(np.asarray(ncRTOFS.MT[:])[0])
tRTOFS = np.asarray([mdates.num2date(mdates.date2num(tRTOFS[t]))
                     for t in np.arange(len(nc_files_RTOFS))])

#%% Loop through gliders
for id in gliders:
    #id = gliders[0]
    print('Reading ' + id)
    e.dataset_id = id
    e.constraints = constraints
    e.variables = variables

    # Converting glider data to data frame
    df = e.to_pandas(
        index_col='time (UTC)',
        parse_dates=True,
index_adcp_flach = 35
for i, depth in enumerate(depth_flach):
    print(i, depth, "data:", not np.all(np.isnan(all_west_east_flach[i, :])))
print("index_adcp_flach", index_adcp_flach, depth_flach[index_adcp_flach])

vert_v_flach = vert_v[index_adcp_flach, 0:8086]
print("all_west_east_flach", np.shape(all_west_east_flach))
west_east_flach = all_west_east_flach[index_adcp_flach, :]
north_south_flach = all_north_south_flach[index_adcp_flach, :]
print(west_east_flach)

# convert matlab time to utc
utc_flach = np.asarray(mdates.num2date(rtc - 366))
print("utc flach:", np.shape(utc_flach), np.shape(west_east_flach))

# Load TC-tief
# -------------------------
#print(sio.whosmat(FILENAME))
datafile_path = "/home/ole/windows/all_data/emb217/deployments/moorings/TC_Tief/adcp/data/EMB217_TC-tief_adcp300_val.mat"
data = sio.loadmat(datafile_path)
data = data["adcpavg"]
substructure = data.dtype
depth_tief = (data["depth"][0][0]).flatten()
number_of_depth_bins_tief = depth_tief.size
assert np.mean(depth_tief) > 0
rtc = data["rtc"][0][0].flatten()
    'transfer_Fernando/ext-PSY4V3R1_1dAV_20190301_20190302_gridV_R20190313.nc')
v = np.ma.filled(filev['vomecrty'][::], fill_value=0)
utim = np.zeros([ntimes, limn, u.shape[2], u.shape[3]])
vtim = np.zeros([ntimes, limn, v.shape[2], v.shape[3]])
depth = np.ma.filled(fileu['vozocrtx'][::], fill_value=19999999)
depth = depth[0, 0, :, :]
depth[depth < 19999999] = 1000
depth[depth >= 19999999] = -1000
layer = -np.ma.filled(fileu['deptht'][::], fill_value=0)
layer = layer[0:limn]

for i in range(ntimes):
    fileu = (input + dates.num2date(date[i]).strftime("%Y%m%d") + '_' +
             dates.num2date(date[i] + 1).strftime("%Y%m%d") + '_gridU_R20190313.nc')
    filev = (input + dates.num2date(date[i]).strftime("%Y%m%d") + '_' +
             dates.num2date(date[i] + 1).strftime("%Y%m%d") + '_gridV_R20190313.nc')
    fileu = dat(fileu)
    filev = dat(filev)
    u = np.ma.filled(fileu['vozocrtx'][:, limn - 1, ::], fill_value=0)
    v = np.ma.filled(filev['vomecrty'][:, limn - 1, ::], fill_value=0)
    utim[i, ::] = u
    vtim[i, ::] = v

u1 = np.squeeze(utim).copy()
v1 = np.squeeze(vtim).copy()
time = pd.read_csv('time_hours.txt',
                     2], true_dates[last_date_idx, 1],
                     true_dates[last_date_idx, 0])  # get the end date

# get an array with all the numeric date values
all_ticks = numpy.linspace(date2num(start_date), date2num(end_date), test_size)

tick_spacing = test_size if test_size <= 60 else 12
date_format = "%m/%d" if test_size <= 60 else "%y/%m"
# major_ticks = numpy.arange(date2num(start_date), date2num(end_date), tick_spacing)
# get an array with the date values we want to display
major_ticks = numpy.linspace(date2num(start_date), date2num(end_date), tick_spacing)
major_tick_labels = [date.strftime(date_format) for date in num2date(major_ticks)]

# PLOT THE OUTPUT DEMAND TOGETHER WITH THE RELEVANT PORTION OF THE ORIGINAL DATA -----------------------------------------
true_out_demand = demand_ds[test_lower_limit:test_upper_limit, 1]
predicted_out_demand = predicted[:, 1]
plot_w_xticks(all_ticks, major_ticks, major_tick_labels,
              [(true_out_demand, 'b-o'), (predicted_out_demand, 'r-o')])
axes = plt.gca()
axes.set_ylim([0, 1])  # set the y-axis limits between 0 and 1
plt.show()

# PLOT THE ERROR ---------------------------------------------------------------------------------------------------
diff = true_out_demand - predicted_out_demand
diff = abs(diff)
error_ds = diff / (predicted_out_demand + 0.0001)
def checkVisPA(ra, dec, targetName=None, ephFileName=None, fig=None):
    """Check the visibility at a range of position angles

    Parameters
    ----------
    ra: float
        The RA of the target
    dec: float
        The Dec of the target
    targetName: str
        The target name
    ephFileName: str
        The filename of the ephemeris file
    fig: matplotlib.pyplot.figure, bokeh.plotting.figure
        The figure to plot to

    Returns
    -------
    paGood : float
        The good position angle.
    paBad : float
        The bad position angle.
    gd : matplotlib.dates object
        The Gregorian date.
    fig : matplotlib.pyplot object
        The plotted figure.
    """
    if ephFileName is None:
        eph_file = 'data/contam_visibility/JWST_ephem_short.txt'
        ephFileName = pkg_resources.resource_filename('exoctk', eph_file)

    if ra.find(':') > -1:  # format is hh:mm:ss.s or dd:mm:ss.s
        ra = convert_ddmmss_to_float(ra) * 15. * D2R
        dec = convert_ddmmss_to_float(dec) * D2R
    else:  # format is decimal
        ra = float(ra) * D2R
        dec = float(dec) * D2R

    # load ephemeris
    eclFlag = False
    eph = EPH.Ephemeris(ephFileName, eclFlag)

    # convert dates from MJD to Gregorian calendar dates
    mjd = np.array(eph.datelist)
    d = mdates.julian2num(mjd + 2400000.5)
    gd = mdates.num2date(d)

    # loop through dates and determine VIS and PAs (nominal, min, max)
    vis = np.empty(mjd.size, dtype=bool)
    paNom = np.empty(mjd.size)
    paMin = np.empty(mjd.size)
    paMax = np.empty(mjd.size)
    for i in range(mjd.size):
        # is it visible?
        vis[i] = eph.in_FOR(mjd[i], ra, dec)

        # nominal PA at this date
        pa = eph.normal_pa(mjd[i], ra, dec)

        # search for minimum PA allowed by roll
        pa0 = pa
        while eph.is_valid(mjd[i], ra, dec, pa0 - 0.002):
            pa0 -= 0.002

        # search for maximum PA allowed by roll
        pa1 = pa
        while eph.is_valid(mjd[i], ra, dec, pa1 + 0.002):
            pa1 += 0.002

        paNom[i] = (pa * R2D) % 360
        paMin[i] = (pa0 * R2D) % 360
        paMax[i] = (pa1 * R2D) % 360

    # does PA go through 360 deg?
    wrap = np.any(np.abs(np.diff(paNom[np.where(vis)[0]])) > 350)

    # Determine good and bad PA ranges
    # Good PAs
    i, = np.where(vis)
    pa = np.concatenate((paNom[i], paMin[i], paMax[i]))
    if wrap:
        pa = np.append(pa, (0., 360.))
    pa.sort()

    i1, = np.where(np.diff(pa) > 10)
    i0 = np.insert(i1 + 1, 0, 0)
    i1 = np.append(i1, -1)
    paGood = np.dstack((pa[i0], pa[i1])).round(1).reshape(-1, 2).tolist()

    # bad PAs (complement of the good PAs)
    paBad = []
    if paGood[0][0] > 0:
        paBad.append([0., paGood[0][0]])
    for i in range(1, len(paGood)):
        paBad.append([paGood[i - 1][1], paGood[i][0]])
    if paGood[-1][1] < 360.:
        paBad.append([paGood[-1][1], 360.])

    # Make a figure
    if fig is None or fig == True:
        fig = plt.gcf()

    # Do all figure calculations
    iBad, = np.where(vis == False)
    paMasked = np.copy(paNom)
    paMasked[iBad] = np.nan
    gdMasked = np.copy(gd)

    i = np.argmax(paNom)
    if paNom[i + 1] < 10:
        i += 1
    paMasked = np.insert(paMasked, i, np.nan)
    gdMasked = np.insert(gdMasked, i, gdMasked[i])

    i = np.argmax(paMin)
    goUp = paMin[i - 2] < paMin[i - 1]  # PA going up at wrap point?

    # Top part
    i0_top = 0 if goUp else i
    i1_top = i if goUp else paMin.size - 1
    paMaxTmp = np.copy(paMax)
    paMaxTmp[np.where(paMin > paMax)[0]] = 360

    # Bottom part
    i = np.argmin(paMax)
    i0_bot = i if goUp else 0
    i1_bot = paMin.size - 1 if goUp else i
    paMinTmp = np.copy(paMin)
    paMinTmp[np.where(paMin > paMax)[0]] = 0

    # Add fits to matplotlib
    if isinstance(fig, matplotlib.figure.Figure):
        # Make axes
        ax = plt.axes()
        plt.title(targetName)

        # plot nominal PA
        plt.plot(gdMasked, paMasked, color='k')

        # plot ranges allowed through roll
        if wrap:
            i = np.argmax(paMin)
            goUp = paMin[i - 2] < paMin[i - 1]  # PA going up at wrap point?

            # top part
            plt.fill_between(gd[i0_top:i1_top + 1], paMin[i0_top:i1_top + 1],
                             paMaxTmp[i0_top:i1_top + 1],
                             where=vis[i0_top:i1_top + 1],
                             lw=0, facecolor='k', alpha=0.5)

            # bottom part
            plt.fill_between(gd[i0_bot:i1_bot + 1], paMinTmp[i0_bot:i1_bot + 1],
                             paMax[i0_bot:i1_bot + 1],
                             where=vis[i0_bot:i1_bot + 1],
                             lw=0, facecolor='k', alpha=0.5)
        else:
            plt.fill_between(gd, paMin, paMax, where=vis, lw=0,
                             facecolor='k', alpha=0.5)

        plt.ylabel('Position Angle (degrees)')
        plt.xlim(min(gd), max(gd))
        ax.xaxis.set_major_locator(mdates.MonthLocator())
        ax.xaxis.set_major_formatter(mdates.DateFormatter("%b '%y"))
        ax.xaxis.set_minor_locator(mdates.DayLocator(list(range(1, 32, 5))))
        plt.ylim(0, 360)
        ax.yaxis.set_major_locator(MultipleLocator(25))
        ax.yaxis.set_minor_locator(MultipleLocator(5))
        plt.grid()
        for label in ax.get_xticklabels():
            label.set_rotation(45)

    # Or to bokeh!
    else:
        # Convert datetime to a number for Bokeh
        gdMaskednum = [datetime.date(2019, 6, 1) + datetime.timedelta(days=n)
                       for n, d in enumerate(gdMasked)]
        color = 'green'

        # Draw the curve and error
        fig.line(gdMaskednum, paMasked, legend='cutoff', line_color=color)

        # Top
        err_y = np.concatenate([paMin[i0_top:i1_top + 1],
                                paMaxTmp[i0_top:i1_top + 1][::-1]])
        err_x = np.concatenate([gdMaskednum[i0_top:i1_top + 1],
                                gdMaskednum[i0_top:i1_top + 1][::-1]])
        fig.patch(err_x, err_y, color=color, fill_alpha=0.2, line_alpha=0)

        # Bottom
        err_y = np.concatenate([paMinTmp[i0_bot:i1_bot + 1],
                                paMax[i0_bot:i1_bot + 1][::-1]])
        err_x = np.concatenate([gdMaskednum[i0_bot:i1_bot + 1],
                                gdMaskednum[i0_bot:i1_bot + 1][::-1]])
        fig.patch(err_x, err_y, color=color, fill_alpha=0.2, line_alpha=0)

        # Plot formatting
        fig.xaxis.axis_label = 'Date'
        fig.yaxis.axis_label = 'Position Angle (degrees)'

    return paGood, paBad, gd, fig
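# Hypothetical invocation (RA/Dec may be sexagesimal strings or decimal
# degrees; the packaged exoctk ephemeris is used when ephFileName is None):
# paGood, paBad, gd, fig = checkVisPA('12:34:56.7', '-00:12:34.5',
#                                     targetName='my target', fig=True)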
def __init__(self, master): # load data datetime_list, barpress_list = [], [] datetime_re = re.compile(r'[\d]{2,4}') # regex to get datetime info for year in range(2012, 2016): fname = '..\\resources\\Environmental_Data_Deep_Moor_{0}.txt'.format( year) print('Loading {0}'.format(fname)) for row in DictReader(open(fname, 'r'), delimiter='\t'): barpress_list.append(float(row['Barometric_Press'])) datetime_list.append( date2num( datetime(*list( map( int, datetime_re.findall( row['date time '])))))) self.datetime_array = np.array(datetime_list) self.barpress_array = np.array(barpress_list) # build the gui master.title('Weather Statistics') master.resizable(True, True) master.state('zoomed') matplotlib.rc('font', size=18) f = Figure() f.set_facecolor((0, 0, 0, 0)) self.a = f.add_subplot(111) self.canvas = FigureCanvasTkAgg(f, master) self.canvas.draw() toolbar_frame = ttk.Frame(master) # needed to put navbar above plot toolbar = NavigationToolbar2Tk(self.canvas, toolbar_frame) toolbar.update() toolbar_frame.pack(side=TOP, fill=X, expand=0) self.canvas._tkcanvas.pack(fill=BOTH, expand=1) controls_frame = ttk.Frame(master) controls_frame.pack() ttk.Label(controls_frame, text='Start', font='Arial 18 bold').grid(row=0, column=0, pady=5) ttk.Label(controls_frame, text='(YYYY-MM-DD HH:MM:SS)', font='Courier 12').grid(row=1, column=0, padx=50, sticky='s') self.start = StringVar() ttk.Entry(controls_frame, width=19, textvariable=self.start, font='Courier 12').grid(row=2, column=0, sticky='n') self.start.set(str(num2date(self.datetime_array[0]))[0:19]) ttk.Label(controls_frame, text='End', font='Arial 18 bold').grid(row=0, column=1, pady=5) ttk.Label(controls_frame, text='(YYYY-MM-DD HH:MM:SS)', font='Courier 12').grid(row=1, column=1, padx=50, sticky='s') self.end = StringVar() ttk.Entry(controls_frame, width=19, textvariable=self.end, font='Courier 12').grid(row=2, column=1, sticky='n') self.end.set(str(num2date(self.datetime_array[-1]))[0:19]) ttk.Button(controls_frame, text='Update', command=self._update).grid(row=3, column=0, columnspan=2, pady=10) ttk.Style().configure('TButton', font='Arial 18 bold') self._update()
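# A plausible sketch of the `_update` callback wired to the Update button
# above (the original method is not shown in this excerpt): it parses the
# Start/End entries back into matplotlib date numbers, masks the arrays,
# and redraws the canvas.
from matplotlib.dates import DateFormatter

def _update(self):
    start = date2num(datetime.strptime(self.start.get(), '%Y-%m-%d %H:%M:%S'))
    end = date2num(datetime.strptime(self.end.get(), '%Y-%m-%d %H:%M:%S'))
    mask = (self.datetime_array >= start) & (self.datetime_array <= end)
    self.a.clear()
    self.a.plot(self.datetime_array[mask], self.barpress_array[mask])
    self.a.xaxis_date()  # treat the x values as matplotlib date numbers
    self.a.xaxis.set_major_formatter(DateFormatter('%Y-%m-%d'))
    self.canvas.draw()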
grid = args.grid
ncfile = args.ncfile
ncloc = ncfile.rindex('/')

if args.station:
    data = loadnc(ncfile[:ncloc+1], ncfile[ncloc+1:], False)
    data['lon'] = data['lon'] - 360
    data['x'], data['y'], data['proj'] = lcc(data['lon'], data['lat'])
    x, y = data['x'], data['y']
    lon, lat = data['lon'], data['lat']
    tag = 'station'
    if 'time' in data:
        data['time'] = data['time'] + 678576  # older station files
    if 'time_JD' in data:
        data['time'] = data['time_JD'] + (data['time_second'] / 86400.0) + 678576
    data['dTimes'] = dates.num2date(data['time'])
    data['Time'] = np.array([ct.isoformat(sep=' ')[:19] for ct in data['dTimes']])
else:
    data = loadnc(ncfile[:ncloc+1], ncfile[ncloc+1:])
    lon, lat = data['lon'], data['lat']
    x, y = data['x'], data['y']
    tag = 'fvcom'
print('done load')

savepath = '{}/{}/buoy/{}/'.format(datapath, grid, name)
if not os.path.exists(savepath):
    os.makedirs(savepath)
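# Note on the magic constant above: 678576 is the proleptic Gregorian ordinal
# of 1858-11-17, the Modified Julian Date epoch, so adding it converts MJD
# values to matplotlib's classic date numbers (days since 0001-01-01). A
# quick self-contained check:
from datetime import date
assert date(1858, 11, 17).toordinal() == 678576
# Caveat: matplotlib >= 3.3 defaults to a 1970-01-01 epoch, so code relying
# on this offset must restore the old one first, e.g.
# matplotlib.dates.set_epoch('0000-12-31T00:00:00').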
def plot(data, **kwargs): """ Given a Pandas DataFrame containing columns Open,High,Low,Close and optionally Volume with a DatetimeIndex, plot the data. Available plots include ohlc bars, candlestick, and line plots. Also provide visually analysis in the form of common technical studies, such as: moving averages, renko, etc. Also provide ability to plot trading signals, and/or addtional user-defined data. """ config = _process_kwargs(kwargs, _valid_plot_kwargs()) dates, opens, highs, lows, closes, volumes = _check_and_prepare_data( data, config) if config['type'] in VALID_PMOVE_TYPES and config['addplot'] is not None: err = "`addplot` is not supported for `type='" + config['type'] + "'`" raise ValueError(err) style = config['style'] if isinstance(style, str): style = _styles._get_mpfstyle(style) if isinstance(style, dict): _styles._apply_mpfstyle(style) w, h = config['figratio'] r = float(w) / float(h) if r < 0.25 or r > 4.0: raise ValueError( '"figratio" (aspect ratio) must be between 0.25 and 4.0 (but is ' + str(r) + ')') base = (w, h) figscale = config['figscale'] fsize = [d * figscale for d in base] fig = plt.figure() fig.set_size_inches(fsize) if config['volume'] and volumes is None: raise ValueError('Request for volume, but NO volume data.') if config['volume']: if config['volume'] not in ['B', 'C']: config['volume'] = 'B' ha, hb, hc = _determine_relative_panel_heights(config['addplot'], config['volume'], config['panel_ratio']) axA1, axA2, axB1, axB2, axC1, axC2, actual_order = _create_panel_axes( fig, ha, hb, hc, config['panel_order']) internalAxes = dict(A=(axA1, axA2), B=(axB1, axB2), C=(axC1, axC2)) volumeAxes = internalAxes[ config['volume']][0] if config['volume'] else None fmtstring = _determine_format_string(dates, config['datetime_format']) ptype = config['type'] if config['show_nontrading']: formatter = mdates.DateFormatter(fmtstring) xdates = dates else: formatter = IntegerIndexDateTimeFormatter(dates, fmtstring) xdates = np.arange(len(dates)) axA1.xaxis.set_major_formatter(formatter) collections = None if ptype == 'line': axA1.plot(xdates, closes, color=config['linecolor']) else: collections = _construct_mpf_collections(ptype, dates, xdates, opens, highs, lows, closes, volumes, config, style) if ptype in VALID_PMOVE_TYPES: collections, new_dates, volumes, brick_values, size = collections formatter = IntegerIndexDateTimeFormatter(new_dates, fmtstring) xdates = np.arange(len(new_dates)) axA1.xaxis.set_major_formatter(formatter) if collections is not None: for collection in collections: axA1.add_collection(collection) mavgs = config['mav'] if mavgs is not None: if isinstance(mavgs, int): mavgs = mavgs, # convert to tuple if len(mavgs) > 7: mavgs = mavgs[0:7] # take at most 7 if style['mavcolors'] is not None: mavc = cycle(style['mavcolors']) else: mavc = None for mav in mavgs: if ptype in VALID_PMOVE_TYPES: mavprices = pd.Series(brick_values).rolling(mav).mean().values else: mavprices = pd.Series(closes).rolling(mav).mean().values if mavc: axA1.plot(xdates, mavprices, color=next(mavc)) else: axA1.plot(xdates, mavprices) avg_dist_between_points = (xdates[-1] - xdates[0]) / float(len(xdates)) minx = xdates[0] - avg_dist_between_points maxx = xdates[-1] + avg_dist_between_points if len(xdates) == 1: # kludge special case minx = minx - 0.75 maxx = maxx + 0.75 if ptype not in VALID_PMOVE_TYPES: _lows = lows _highs = highs else: _lows = brick_values _highs = [brick + size for brick in brick_values] miny = np.nanmin(_lows) maxy = np.nanmax(_highs) #if len(xdates) > 1: # stdy = 
(stat.stdev(_lows) + stat.stdev(_highs)) / 2.0 #else: # kludge special case # stdy = 0.02 * math.fabs(maxy - miny) # print('minx,miny,maxx,maxy,stdy=',minx,miny,maxx,maxy,stdy) if config['set_ylim'] is not None: axA1.set_ylim(config['set_ylim'][0], config['set_ylim'][1]) else: corners = (minx, miny), (maxx, maxy) axA1.update_datalim(corners) if config['return_calculated_values'] is not None: retdict = config['return_calculated_values'] if ptype in VALID_PMOVE_TYPES: prekey = ptype retdict[prekey + '_bricks'] = brick_values retdict[prekey + '_dates'] = mdates.num2date(new_dates) retdict[prekey + '_size'] = size if config['volume']: retdict[prekey + '_volumes'] = volumes if mavgs is not None: for i in range(0, len(mavgs)): retdict['mav' + str(mavgs[i])] = mavprices retdict['minx'] = minx retdict['maxx'] = maxx retdict['miny'] = miny retdict['maxy'] = maxy # Note: these are NOT mutually exclusive, so the order of this # if/elif is important: VALID_PMOVE_TYPES must be first. if ptype in VALID_PMOVE_TYPES: dtix = pd.DatetimeIndex([dt for dt in mdates.num2date(new_dates)]) elif not config['show_nontrading']: dtix = data.index else: dtix = None line_collections = [] line_collections.append( _construct_aline_collections(config['alines'], dtix)) line_collections.append( _construct_hline_collections(config['hlines'], minx, maxx)) line_collections.append( _construct_vline_collections(config['vlines'], dtix, miny, maxy)) tlines = config['tlines'] if isinstance(tlines, (list, tuple)) and all( [isinstance(item, dict) for item in tlines]): pass else: tlines = [ tlines, ] for tline_item in tlines: line_collections.append( _construct_tline_collections(tline_item, dtix, dates, opens, highs, lows, closes)) for collection in line_collections: if collection is not None: axA1.add_collection(collection) if config['volume']: vup, vdown = style['marketcolors']['volume'].values() #-- print('vup,vdown=',vup,vdown) vcolors = _updown_colors( vup, vdown, opens, closes, use_prev_close=style['marketcolors']['vcdopcod']) #-- print('len(vcolors),len(opens),len(closes)=',len(vcolors),len(opens),len(closes)) #-- print('vcolors=',vcolors) width = 0.5 * avg_dist_between_points volumeAxes.bar(xdates, volumes, width=width, color=vcolors) miny = 0.3 * np.nanmin(volumes) maxy = 1.1 * np.nanmax(volumes) volumeAxes.set_ylim(miny, maxy) xrotation = config['xrotation'] _adjust_ticklabels_per_bottom_panel(axA1, axB1, axC1, actual_order, hb, hc, formatter, xrotation) used_axA2 = False used_axB2 = False used_axC2 = False addplot = config['addplot'] if addplot is not None and ptype not in VALID_PMOVE_TYPES: # Calculate the Order of Magnitude Range # If addplot['secondary_y'] == 'auto', then: If the addplot['data'] # is out of the Order of Magnitude Range, then use secondary_y. 
# Calculate omrange for Main panel, and for Lower (volume) panel: lo = math.log(max(math.fabs(np.nanmin(lows)), 1e-7), 10) - 0.5 hi = math.log(max(math.fabs(np.nanmax(highs)), 1e-7), 10) + 0.5 # May 2020: Main panel is now called 'A', and Lower is called 'B' omrange = {'A': {'lo': lo, 'hi': hi}, 'B': None, 'C': None} if config['volume']: lo = math.log(max(math.fabs(np.nanmin(volumes)), 1e-7), 10) - 0.5 hi = math.log(max(math.fabs(np.nanmax(volumes)), 1e-7), 10) + 0.5 omrange.update(B={'lo': lo, 'hi': hi}) if isinstance(addplot, dict): addplot = [ addplot, ] # make list of dict to be consistent elif not _list_of_dict(addplot): raise TypeError('addplot must be `dict`, or `list of dict`, NOT ' + str(type(addplot))) for apdict in addplot: apdata = apdict['data'] if isinstance(apdata, list) and not isinstance(apdata[0], (float, int)): raise TypeError('apdata is list but NOT of float or int') if isinstance(apdata, pd.DataFrame): havedf = True else: havedf = False # must be a single series or array apdata = [ apdata, ] # make it iterable for column in apdata: if havedf: ydata = apdata.loc[:, column] else: ydata = column yd = [y for y in ydata if not math.isnan(y)] ymhi = math.log(max(math.fabs(np.nanmax(yd)), 1e-7), 10) ymlo = math.log(max(math.fabs(np.nanmin(yd)), 1e-7), 10) secondary_y = False if apdict['secondary_y'] == 'auto': if apdict['panel'] == 'lower' or apdict['panel'] == 'B': # If omrange['lower'] is not yet set by volume, # then set it here as this is the first ydata # to be plotted on the lower panel, so consider # it to be the 'primary' lower panel axis. if omrange['B'] is None: omrange.update(B={'lo': ymlo, 'hi': ymhi}) elif ymlo < omrange['B']['lo'] or ymhi > omrange['B'][ 'hi']: secondary_y = True elif apdict['panel'] == 'C': if omrange['C'] is None: omrange.update(B={'lo': ymlo, 'hi': ymhi}) elif ymlo < omrange['C']['lo'] or ymhi > omrange['C'][ 'hi']: secondary_y = True elif ymlo < omrange['A']['lo'] or ymhi > omrange['A']['hi']: secondary_y = True # if secondary_y: # print('auto says USE secondary_y') # else: # print('auto says do NOT use secondary_y') else: secondary_y = apdict['secondary_y'] # print("apdict['secondary_y'] says secondary_y is",secondary_y) if apdict['panel'] == 'lower' or apdict['panel'] == 'B': ax = axB2 if secondary_y else axB1 elif apdict['panel'] == 'C': ax = axC2 if secondary_y else axC1 else: ax = axA2 if secondary_y else axA1 if ax == axA2: used_axA2 = True if ax == axB2: used_axB2 = True if ax == axC2: used_axC2 = True aptype = apdict['type'] if aptype == 'scatter': size = apdict['markersize'] mark = apdict['marker'] color = apdict['color'] # -------------------------------------------------------- # # This fixes Issue#77, but breaks other stuff: # ax.set_ylim(ymin=(miny - 0.4*stdy),ymax=(maxy + 0.4*stdy)) # -------------------------------------------------------- # ax.scatter(xdates, ydata, s=size, marker=mark, color=color) elif aptype == 'bar': width = apdict['width'] bottom = apdict['bottom'] color = apdict['color'] alpha = apdict['alpha'] ax.bar(xdates, ydata, width=width, bottom=bottom, color=color, alpha=alpha) elif aptype == 'line': ls = apdict['linestyle'] color = apdict['color'] ax.plot(xdates, ydata, linestyle=ls, color=color) #elif aptype == 'ohlc' or aptype == 'candle': # This won't work as is, because here we are looping through one column at a time # and mpf_collections needs ohlc columns: # collections =_construct_mpf_collections(aptype,dates,xdates,opens,highs,lows,closes,volumes,config,style) # if len(collections) == 1: collections = 
[collections] # for collection in collections: # ax.add_collection(collection) else: raise ValueError('addplot type "' + str(aptype) + '" NOT yet supported.') if config['set_ylim_panelB'] is not None: miny = config['set_ylim_panelB'][0] maxy = config['set_ylim_panelB'][1] axB1.set_ylim(miny, maxy) if config['set_ylim_panelC'] is not None: miny = config['set_ylim_panelC'][0] maxy = config['set_ylim_panelC'][1] axC1.set_ylim(miny, maxy) if config['yscale'] is not None: yscale = config['yscale'] panel = 'A' kwargs = None if isinstance(yscale, dict): if 'panel' in yscale: panel = yscale['panel'] if 'kwargs' in yscale: kwargs = yscale['kwargs'] yscale = yscale['yscale'] ax = internalAxes[panel][0] if kwargs is not None: ax.set_yscale(yscale, **kwargs) else: ax.set_yscale(yscale) # put the twinx() on the "other" side: if style['y_on_right']: axA1.yaxis.set_label_position('right') axA1.yaxis.tick_right() axA2.yaxis.set_label_position('left') axA2.yaxis.tick_left() if axB1 and axB2: axB1.yaxis.set_label_position('right') axB1.yaxis.tick_right() if axB2 != axB1: axB2.yaxis.set_label_position('left') axB2.yaxis.tick_left() else: axA1.yaxis.set_label_position('left') axA1.yaxis.tick_left() axA2.yaxis.set_label_position('right') axA2.yaxis.tick_right() if axB1 and axB2: axB1.yaxis.set_label_position('left') axB1.yaxis.tick_left() if axB2 != axB1: axB2.yaxis.set_label_position('right') axB2.yaxis.tick_right() # TODO: ================================================================ # TODO: Investigate: # TODO: =========== # TODO: It appears to me that there may be some or significant overlap # TODO: between what the following functions actually do: # TODO: At the very least, all four of them appear to communicate # TODO: to matplotlib that the xaxis should be treated as dates: # TODO: -> 'ax.autoscale_view()' # TODO: -> 'ax.xaxis_dates()' # TODO: -> 'plt.autofmt_xdates()' # TODO: -> 'fig.autofmt_xdate()' # TODO: ================================================================ #if config['autofmt_xdate']: #print('CALLING fig.autofmt_xdate()') #fig.autofmt_xdate() axA1.autoscale_view() # Is this really necessary?? 
axA1.set_ylabel(config['ylabel']) if config['volume']: volumeAxes.figure.canvas.draw() # This is needed to calculate offset offset = volumeAxes.yaxis.get_major_formatter().get_offset() volumeAxes.yaxis.offsetText.set_visible(False) if len(offset) > 0: offset = (' x ' + offset) if config['ylabel_lower'] is None: vol_label = 'Volume' + offset else: if len(offset) > 0: offset = '\n' + offset vol_label = config['ylabel_lower'] + offset volumeAxes.set_ylabel(vol_label) if config['title'] is not None: fig.suptitle(config['title'], size='x-large', weight='semibold') if not used_axA2 and axA2 is not None: axA2.get_yaxis().set_visible(False) if not used_axB2 and axB2 is not None: axB2.get_yaxis().set_visible(False) if not used_axC2 and axC2 is not None: axC2.get_yaxis().set_visible(False) axlist = [axA1, axA2] if axB1: axlist.append(axB1) if axB2: axlist.append(axB2) if axC1: axlist.append(axC1) if axC2: axlist.append(axC2) if config['axesoffdark']: fig.patch.set_facecolor('black') if config['axesoff']: fig.patch.set_visible(False) if config['axesoffdark'] or config['axesoff']: for ax in axlist: ax.set_xlim(xdates[0], xdates[-1]) ax.set_axis_off() if config['savefig'] is not None: save = config['savefig'] if isinstance(save, dict): plt.savefig(**save) else: plt.savefig(save) if config['closefig']: plt.close(fig) elif not config['returnfig']: plt.show(block=config['block'] ) # https://stackoverflow.com/a/13361748/1639359 if config['block']: plt.close(fig) if config['returnfig']: return (fig, axlist)
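# A minimal usage sketch for plot() above (mplfinance-style), using a tiny
# synthetic OHLCV frame; the kwargs shown are ones consumed in the body above.
import pandas as pd
idx = pd.date_range('2020-01-01', periods=5, freq='D')
df = pd.DataFrame({'Open': [1, 2, 3, 2, 3], 'High': [2, 3, 4, 3, 4],
                   'Low': [0.5, 1, 2, 1, 2], 'Close': [2, 3, 2, 3, 3.5],
                   'Volume': [10, 12, 9, 11, 13]}, index=idx)
fig, axlist = plot(df, type='candle', mav=(2, 3), volume=True, returnfig=True)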
def fmtfunc(x, pos=None):
    d = num2date(x)
    out = d.strftime('%H:%M:%S.')
    out += format(np.round(d.microsecond / 1000, 1), '03.0f')
    return out
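# fmtfunc matches matplotlib's (value, position) tick-formatter signature, so
# it can be attached directly to a date axis; a short sketch assuming `ax` is
# an existing Axes whose x values are matplotlib date numbers:
import matplotlib.ticker as mticker
ax.xaxis.set_major_formatter(mticker.FuncFormatter(fmtfunc))
# Each major tick then renders as HH:MM:SS.mmm, i.e. millisecond precision.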
def test_auto_date_locator_intmult_tz(): def _create_auto_date_locator(date1, date2, tz): locator = mdates.AutoDateLocator(interval_multiples=True, tz=tz) locator.create_dummy_axis() locator.set_view_interval(mdates.date2num(date1), mdates.date2num(date2)) return locator results = ([datetime.timedelta(weeks=52*200), ['1980-01-01 00:00:00-08:00', '2000-01-01 00:00:00-08:00', '2020-01-01 00:00:00-08:00', '2040-01-01 00:00:00-08:00', '2060-01-01 00:00:00-08:00', '2080-01-01 00:00:00-08:00', '2100-01-01 00:00:00-08:00', '2120-01-01 00:00:00-08:00', '2140-01-01 00:00:00-08:00', '2160-01-01 00:00:00-08:00', '2180-01-01 00:00:00-08:00', '2200-01-01 00:00:00-08:00'] ], [datetime.timedelta(weeks=52), ['1997-01-01 00:00:00-08:00', '1997-02-01 00:00:00-08:00', '1997-03-01 00:00:00-08:00', '1997-04-01 00:00:00-08:00', '1997-05-01 00:00:00-07:00', '1997-06-01 00:00:00-07:00', '1997-07-01 00:00:00-07:00', '1997-08-01 00:00:00-07:00', '1997-09-01 00:00:00-07:00', '1997-10-01 00:00:00-07:00', '1997-11-01 00:00:00-08:00', '1997-12-01 00:00:00-08:00'] ], [datetime.timedelta(days=141), ['1997-01-01 00:00:00-08:00', '1997-01-22 00:00:00-08:00', '1997-02-01 00:00:00-08:00', '1997-02-22 00:00:00-08:00', '1997-03-01 00:00:00-08:00', '1997-03-22 00:00:00-08:00', '1997-04-01 00:00:00-08:00', '1997-04-22 00:00:00-07:00', '1997-05-01 00:00:00-07:00', '1997-05-22 00:00:00-07:00'] ], [datetime.timedelta(days=40), ['1997-01-01 00:00:00-08:00', '1997-01-05 00:00:00-08:00', '1997-01-09 00:00:00-08:00', '1997-01-13 00:00:00-08:00', '1997-01-17 00:00:00-08:00', '1997-01-21 00:00:00-08:00', '1997-01-25 00:00:00-08:00', '1997-01-29 00:00:00-08:00', '1997-02-01 00:00:00-08:00', '1997-02-05 00:00:00-08:00', '1997-02-09 00:00:00-08:00'] ], [datetime.timedelta(hours=40), ['1997-01-01 00:00:00-08:00', '1997-01-01 04:00:00-08:00', '1997-01-01 08:00:00-08:00', '1997-01-01 12:00:00-08:00', '1997-01-01 16:00:00-08:00', '1997-01-01 20:00:00-08:00', '1997-01-02 00:00:00-08:00', '1997-01-02 04:00:00-08:00', '1997-01-02 08:00:00-08:00', '1997-01-02 12:00:00-08:00', '1997-01-02 16:00:00-08:00'] ], [datetime.timedelta(minutes=20), ['1997-01-01 00:00:00-08:00', '1997-01-01 00:05:00-08:00', '1997-01-01 00:10:00-08:00', '1997-01-01 00:15:00-08:00', '1997-01-01 00:20:00-08:00'] ], [datetime.timedelta(seconds=40), ['1997-01-01 00:00:00-08:00', '1997-01-01 00:00:05-08:00', '1997-01-01 00:00:10-08:00', '1997-01-01 00:00:15-08:00', '1997-01-01 00:00:20-08:00', '1997-01-01 00:00:25-08:00', '1997-01-01 00:00:30-08:00', '1997-01-01 00:00:35-08:00', '1997-01-01 00:00:40-08:00'] ] ) tz = dateutil.tz.gettz('Canada/Pacific') d1 = datetime.datetime(1997, 1, 1, tzinfo=tz) for t_delta, expected in results: with rc_context({'_internal.classic_mode': False}): d2 = d1 + t_delta locator = _create_auto_date_locator(d1, d2, tz) st = list(map(str, mdates.num2date(locator(), tz=tz))) assert st == expected
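# Outside the test, the same locator can drive a real axis; a brief sketch
# assuming `ax` is an Axes plotting timezone-aware datetimes:
import dateutil.tz
import matplotlib.dates as mdates
tz = dateutil.tz.gettz('Canada/Pacific')
locator = mdates.AutoDateLocator(interval_multiples=True, tz=tz)
ax.xaxis.set_major_locator(locator)
ax.xaxis.set_major_formatter(mdates.ConciseDateFormatter(locator, tz=tz))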
lon_pom = np.asarray(pom_grid['east_e'][:])
lat_pom = np.asarray(pom_grid['north_e'][:])
zlevc = np.asarray(pom_grid['zz'][:])
topoz = np.asarray(pom_grid['h'][:])

#%% Getting list of POM files
ncfiles = sorted(glob.glob(os.path.join(folder_hwrf_pom, '*pom.0*.nc')))

# Reading POM time
time_pom = []
for i, file in enumerate(ncfiles):
    print(i)
    pom = xr.open_dataset(file)
    tpom = pom['time'][:]
    timestamp_pom = date2num(tpom)[0]
    time_pom.append(num2date(timestamp_pom))

time_POM = np.asarray(time_pom)

oklon = np.round(np.interp(lon_buoy, lon_pom[0, :], np.arange(len(lon_pom[0, :])))).astype(int)
oklat = np.round(np.interp(lat_buoy, lat_pom[:, 0], np.arange(len(lat_pom[:, 0])))).astype(int)

topoz_pom = np.asarray(topoz[oklat, oklon])
zmatrix_POM = np.dot(topoz_pom.reshape(-1, 1), zlevc.reshape(1, -1)).T

prof_temp_POM = np.empty((len(time_POM), zmatrix_POM.shape[0]))
prof_temp_POM[:] = np.nan

max_valt = 26
min_valt = 8
nlevelst = max_valt - min_valt + 1
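# The np.interp/np.round idiom above (also used in several snippets below) is
# a nearest-index lookup on a monotonic coordinate array; a small helper that
# captures it, as a sketch:
def nearest_index(value, grid):
    """Index of the point in a monotonically increasing grid nearest to value."""
    return int(np.round(np.interp(value, grid, np.arange(len(grid)))))
# e.g. oklon = nearest_index(lon_buoy, lon_pom[0, :])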
def _process(self, element, key=None): try: from matplotlib.contour import QuadContourSet from matplotlib.axes import Axes from matplotlib.figure import Figure from matplotlib.dates import num2date, date2num except ImportError: raise ImportError("contours operation requires matplotlib.") extent = element.range(0) + element.range(1)[::-1] xs = element.dimension_values(0, True, flat=False) ys = element.dimension_values(1, True, flat=False) zs = element.dimension_values(2, flat=False) # Ensure that coordinate arrays specify bin centers if xs.shape[0] != zs.shape[0]: xs = xs[:-1] + np.diff(xs, axis=0) / 2. if xs.shape[1] != zs.shape[1]: xs = xs[:, :-1] + (np.diff(xs, axis=1) / 2.) if ys.shape[0] != zs.shape[0]: ys = ys[:-1] + np.diff(ys, axis=0) / 2. if ys.shape[1] != zs.shape[1]: ys = ys[:, :-1] + (np.diff(ys, axis=1) / 2.) data = (xs, ys, zs) # if any data is a datetime, transform to matplotlib's numerical format data_is_datetime = tuple(isdatetime(arr) for k, arr in enumerate(data)) if any(data_is_datetime): data = tuple( date2num(d) if is_datetime else d for d, is_datetime in zip(data, data_is_datetime)) xdim, ydim = element.dimensions('key', label=True) if self.p.filled: contour_type = Polygons else: contour_type = Contours vdims = element.vdims[:1] kwargs = {} levels = self.p.levels zmin, zmax = element.range(2) if isinstance(self.p.levels, int): if zmin == zmax: contours = contour_type([], [xdim, ydim], vdims) return (element * contours) if self.p.overlaid else contours data += (levels, ) else: kwargs = {'levels': levels} fig = Figure() ax = Axes(fig, [0, 0, 1, 1]) contour_set = QuadContourSet(ax, *data, filled=self.p.filled, extent=extent, **kwargs) levels = np.array(contour_set.get_array()) crange = levels.min(), levels.max() if self.p.filled: levels = levels[:-1] + np.diff(levels) / 2. vdims = [vdims[0].clone(range=crange)] paths = [] empty = np.array([[np.nan, np.nan]]) for level, cset in zip(levels, contour_set.collections): exteriors = [] interiors = [] for geom in cset.get_paths(): interior = [] polys = geom.to_polygons(closed_only=False) for ncp, cp in enumerate(polys): if any(data_is_datetime[0:2]): # transform x/y coordinates back to datetimes xs, ys = np.split(cp, 2, axis=1) if data_is_datetime[0]: xs = np.array(num2date(xs)) if data_is_datetime[1]: ys = np.array(num2date(ys)) cp = np.concatenate((xs, ys), axis=1) if ncp == 0: exteriors.append(cp) exteriors.append(empty) else: interior.append(cp) if len(polys): interiors.append(interior) if not exteriors: continue geom = { element.vdims[0].name: num2date(level) if data_is_datetime[2] else level, (xdim, ydim): np.concatenate(exteriors[:-1]) } if self.p.filled and interiors: geom['holes'] = interiors paths.append(geom) contours = contour_type(paths, label=element.label, kdims=element.kdims, vdims=vdims) if self.p.overlaid: contours = element * contours return contours
Daymet_ELM_gridmatching(elmnodex, elmnodey,
                        daymet_lon, daymet_lat,
                        Grid1ifxy=True, Grid2ifxy=True,
                        Grid1_cells=elm_lndij)

# need ALL time-values from ELM output, but only calculated once
yr_elm = np.int32(elm_daynums / 365.0)
doy_elm = elm_daynums - np.int32(elm_daynums / 365.0) * 365.0 + 1.0
# done with if (ncfile is the first of alldirfiles)

for daymet_it in range(tt[0:].size):
    # match YEAR/DOY between 'vdata' and 'elm_vdata' (NOT date/time, due to no_leap in ELM time)
    # note: Daymet uses the leap-year calendar, but DOY-366 data are removed to keep the same number of days in every year
    print('Time for ' + ncfile + ' : ' + str(daymet_it))
    date_tt = num2date(tt[daymet_it]).date()
    yr_tt = date_tt.year
    doy_tt = np.floor(tt[daymet_it] - date2num(date(yr_tt, 1, 1)) + 1)
    elm_it = np.squeeze(np.where((yr_elm == yr_tt) & (doy_elm == doy_tt)))
    if elm_it.size > 0:
        elm_it_all = np.hstack((elm_it_all, elm_it))           # timer count
        daymet_it_all = np.hstack((daymet_it_all, daymet_it))  # timer count
        daynums_all = np.hstack((daynums_all, tt[daymet_it]))  # timer

        # data in DAYMET grid-cells
        vdata_it = np.float32(vdata[daymet_it, ])
        daymetlndmask = ~np.isnan(vdata_it)  # mask land-cells (or, nc variable._FillValue)
def plot_clusters(self, data_dict, clust_flg, scans, name, vel_max=200, vel_step=25, show=True, save=False, base_filepath=""): unique_clusters = np.unique(np.hstack(clust_flg)) noise = -1 in unique_clusters cluster_cmap = get_cluster_cmap(len(unique_clusters), noise) cluster_colors = np.array(cluster_cmap(range(cluster_cmap.N))) vel_ranges = list(range(-vel_max, vel_max + 1, vel_step)) vel_ranges.insert(0, -9999) vel_ranges.append(9999) vel_cmap = plt.cm.jet # use "viridis" colormap to make this redgreen colorblind proof vel_colors = vel_cmap(np.linspace(0, 1, len(vel_ranges))) for i in scans: fig = plt.figure(figsize=(16, 9)) clust_ax = self.add_axis(fig, 121) clust_i = np.unique(clust_flg[i]).astype(int) # Cluster fanplot for ci, c in enumerate(clust_i): clust_mask = clust_flg[i] == c beam_c = data_dict["beam"][i][clust_mask] gate_c = data_dict["gate"][i][clust_mask] color = cluster_colors[(c + 1) % len(cluster_colors)] if c != -1: m = int( len(beam_c) / 2 ) # Beam is sorted, so this is roughly the index of the median beam self.text(str(c), beam_c[m], gate_c[m]) # Label cluster # self.plot(clust_ax, beam_c, gate_c, color) clust_ax.set_title("Clusters") # Velocity fanplot vel_ax = self.add_axis(fig, 122) for s in range(len(vel_ranges) - 1): step_mask = (data_dict["vel"][i] >= vel_ranges[s]) & (data_dict["vel"][i] <= (vel_ranges[s + 1])) beam_s = data_dict["beam"][i][step_mask] gate_s = data_dict["gate"][i][step_mask] self.plot(vel_ax, beam_s, gate_s, vel_colors[s]) self._add_colorbar(fig, vel_ax, vel_ranges, vel_cmap, label="Velocity [m/s]") vel_ax.set_title("Velocity") # Add title scan_time = num2date(data_dict["time"][i][0]).strftime("%H:%M:%S") plt.suptitle("\n\n%sscan time %s" % (name, scan_time)) if save: filepath = "%s_%s.jpg" % (base_filepath, scan_time) plt.savefig(filepath) if show: plt.show() fig.clf() plt.close()
def get_glider_transect_from_DOPPIO(url_doppio,timeg,long,latg): # Read Doppio time, lat and lon print('Retrieving coordinates and time from Doppio ') doppio = xr.open_dataset(url_doppio,decode_times=False) latrhodoppio = np.asarray(doppio.variables['lat_rho'][:]) lonrhodoppio = np.asarray(doppio.variables['lon_rho'][:]) srhodoppio = np.asarray(doppio.variables['s_rho'][:]) ttdoppio = doppio.variables['time'][:] tdoppio = netCDF4.num2date(ttdoppio[:],ttdoppio.attrs['units']) # Read Doppio S-coordinate parameters Vtransf = np.asarray(doppio.variables['Vtransform']) #Vstrect = np.asarray(doppio.variables['Vstretching']) Cs_r = np.asarray(doppio.variables['Cs_r']) #Cs_w = np.asarray(doppio.variables['Cs_w']) sc_r = np.asarray(doppio.variables['s_rho']) #sc_w = np.asarray(doppio.variables['s_w']) # depth h = np.asarray(doppio.variables['h']) # critical depth parameter hc = np.asarray(doppio.variables['hc']) igrid = 1 # Narrowing time window of Doppio to coincide with glider time window tmin = mdates.num2date(mdates.date2num(timeg[0])) tmax = mdates.num2date(mdates.date2num(timeg[-1])) oktime_doppio = np.where(np.logical_and(mdates.date2num(tdoppio) >= mdates.date2num(tmin),\ mdates.date2num(tdoppio) <= mdates.date2num(tmax))) timedoppio = tdoppio[oktime_doppio] # Changing times to timestamp tstamp_glider = [mdates.date2num(timeg[i]) for i in np.arange(len(timeg))] tstamp_doppio = [mdates.date2num(timedoppio[i]) for i in np.arange(len(timedoppio))] # interpolating glider lon and lat to lat and lon on doppio time sublondoppio = np.interp(tstamp_doppio,tstamp_glider,long) sublatdoppio = np.interp(tstamp_doppio,tstamp_glider,latg) # getting the model grid positions for sublonm and sublatm oklatdoppio = np.empty((len(oktime_doppio[0]))) oklatdoppio[:] = np.nan oklondoppio= np.empty((len(oktime_doppio[0]))) oklondoppio[:] = np.nan for t,tt in enumerate(oktime_doppio[0]): # search in xi_rho direction oklatmm = [] oklonmm = [] for pos_xi in np.arange(latrhodoppio.shape[1]): pos_eta = np.round(np.interp(sublatdoppio[t],latrhodoppio[:,pos_xi],np.arange(len(latrhodoppio[:,pos_xi])),\ left=np.nan,right=np.nan)) if np.isfinite(pos_eta): oklatmm.append((pos_eta).astype(int)) oklonmm.append(pos_xi) pos = np.round(np.interp(sublondoppio[t],lonrhodoppio[oklatmm,oklonmm],np.arange(len(lonrhodoppio[oklatmm,oklonmm])))).astype(int) oklatdoppio1 = oklatmm[pos] oklondoppio1 = oklonmm[pos] #search in eta-rho direction oklatmm = [] oklonmm = [] for pos_eta in np.arange(latrhodoppio.shape[0]): pos_xi = np.round(np.interp(sublondoppio[t],lonrhodoppio[pos_eta,:],np.arange(len(lonrhodoppio[pos_eta,:])),\ left=np.nan,right=np.nan)) if np.isfinite(pos_xi): oklatmm.append(pos_eta) oklonmm.append(pos_xi.astype(int)) pos_lat = np.round(np.interp(sublatdoppio[t],latrhodoppio[oklatmm,oklonmm],np.arange(len(latrhodoppio[oklatmm,oklonmm])))).astype(int) oklatdoppio2 = oklatmm[pos_lat] oklondoppio2 = oklonmm[pos_lat] #check for minimum distance dist1 = np.sqrt((oklondoppio1-sublondoppio[t])**2 + (oklatdoppio1-sublatdoppio[t])**2) dist2 = np.sqrt((oklondoppio2-sublondoppio[t])**2 + (oklatdoppio2-sublatdoppio[t])**2) if dist1 >= dist2: oklatdoppio[t] = oklatdoppio1 oklondoppio[t] = oklondoppio1 else: oklatdoppio[t] = oklatdoppio2 oklondoppio[t] = oklondoppio2 oklatdoppio = oklatdoppio.astype(int) oklondoppio = oklondoppio.astype(int) # Getting glider transect from doppio print('Getting glider transect from Doppio') target_tempdoppio = np.empty((len(srhodoppio),len(oktime_doppio[0]))) target_tempdoppio[:] = np.nan target_saltdoppio = 
np.empty((len(srhodoppio),len(oktime_doppio[0]))) target_saltdoppio[:] = np.nan target_zdoppio = np.empty((len(srhodoppio),len(oktime_doppio[0]))) target_zdoppio[:] = np.nan for i in range(len(oktime_doppio[0])): print(len(oktime_doppio[0]),' ',i) target_tempdoppio[:,i] = np.flip(doppio.variables['temp'][oktime_doppio[0][i],:,oklatdoppio[i],oklondoppio[i]]) target_saltdoppio[:,i] = np.flip(doppio.variables['salt'][oktime_doppio[0][i],:,oklatdoppio[i],oklondoppio[i]]) h = np.asarray(doppio.variables['h'][oklatdoppio[i],oklondoppio[i]]) zeta = np.asarray(doppio.variables['zeta'][oktime_doppio[0][i],oklatdoppio[i],oklondoppio[i]]) # Calculate doppio depth as a function of time if Vtransf ==1: if igrid == 1: for k in np.arange(sc_r.shape[0]): z0 = (sc_r[k]-Cs_r[k])*hc + Cs_r[k]*h target_zdoppio[k,i] = z0 + zeta * (1.0 + z0/h); if Vtransf == 2: if igrid == 1: for k in np.arange(sc_r.shape[0]): z0 = (hc*sc_r[k] + Cs_r[k]*h) / (hc+h) target_zdoppio[k,i] = zeta + (zeta+h)*z0 target_zdoppio[:,i] = np.flip(target_zdoppio[:,i]) # change time vector to matrix target_timedoppio = np.tile(timedoppio,(len(srhodoppio),1)) return(target_tempdoppio,target_saltdoppio,target_zdoppio,target_timedoppio)
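# The Vtransform branches above implement the two standard ROMS vertical
# stretching transforms; the same formulas as a standalone sketch:
import numpy as np

def roms_depths(h, zeta, hc, sc_r, Cs_r, vtransform):
    """Depths at rho points for ROMS Vtransform 1 or 2 (vectorized over sc_r)."""
    sc_r = np.asarray(sc_r)
    Cs_r = np.asarray(Cs_r)
    if vtransform == 1:
        z0 = (sc_r - Cs_r) * hc + Cs_r * h
        return z0 + zeta * (1.0 + z0 / h)
    if vtransform == 2:
        z0 = (hc * sc_r + Cs_r * h) / (hc + h)
        return zeta + (zeta + h) * z0
    raise ValueError('vtransform must be 1 or 2')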
"INCREMENT"] tint = f["/"].attrs["TOTAL_INTEGRATION_TIME"] t_start_bf = datetime.datetime.strptime( group.attrs["OBSERVATION_START_UTC"].decode("utf-8")[0:26] + ' +0000', '%Y-%m-%dT%H:%M:%S.%f %z') t_end_bf = datetime.datetime.strptime( group.attrs["OBSERVATION_END_UTC"].decode("utf-8")[0:26] + ' +0000', '%Y-%m-%dT%H:%M:%S.%f %z') # get the frequency axies freq = f["/SUB_ARRAY_POINTING_000/BEAM_000/COORDINATES/COORDINATE_1"].attrs[ "AXIS_VALUES_WORLD"] / 1e6 if chop_off: t_start_chunk = mdates.num2date( (np.ceil(mdates.date2num(t_start_bf) * 24 * 4.)) / 4 / 24) else: t_start_chunk = t_start_bf chunk_num = ((t_end_bf - t_start_chunk) / chunk_t) freq_select_idx = np.int32(np.linspace(0, f_lines - 1, y_points)) f_fits = freq[freq_select_idx] for idx_cur in np.arange(int(chunk_num)): printProgressBar(idx_cur + 1, int(chunk_num), prefix='Progress:', suffix='Complete', length=50)
def should_buy(dataset): len = 120 subset = dataset[-len:] short_his = subset[-5:].copy() short_his['amp'] = short_his['high'] - short_his['low'] ma_amp = short_his['amp'].mean() epsilon = ma_amp*1 epsilon_2 = subset['close'].iloc[-1]*0.03 points = find_turn_points(subset, epsilon) decision = False fuzzy_range = 0.03 fuzzy_range_low = 0.015 close = price = subset['close'].iloc[-1] low = subset['low'].iloc[-1] open = subset['open'].iloc[-1] date = subset.iloc[-1].name prev_open = subset['open'].iloc[-2] prev_close = subset['close'].iloc[-2] prev_change = dataset['change'].iloc[-2] buy_signal_count = 0 v_pos = (price - subset['close'].min()) / (subset['close'].max() - subset['close'].min()) change = dataset['change'].iloc[-1] bottom_points = points[(points.direction=='up')] top_points = points[(points.direction=='down')] # not enough data if points.shape[0]<4: if os.environ['DEBUG']=='ON':print('No enough data') return False if points['direction'].iloc[-2]=='down': last_down = (points['price'].iloc[-2] - points['price'].iloc[-1]) / points['price'].iloc[-2] last_up = (points['price'].iloc[-2] - points['price'].iloc[-3]) / points['price'].iloc[-2] prev_down = (points['price'].iloc[-4] - points['price'].iloc[-3]) / points['price'].iloc[-4] since_days = int(points['num_date'].iloc[-1] - points['num_date'].iloc[-2]) # 下降坡段 pos = 1 max_down = subset['change'][-since_days:].min() if last_down<0.03 \ and max_down>-0.04 \ and since_days<9 \ and since_days>5 : decision = True if os.environ['DEBUG']=='ON': print('{:.10} not droping so much for {:.0f} days at v_pos: {:.2f} max_d:{:.2f}'.format(str(date),since_days,v_pos,max_down)) if v_pos>0 and v_pos<=0.05: decision = True if os.environ['DEBUG']=='ON': print('{:.10} Cheap enough vpos:{:.2f}'.format(str(date),since_days,v_pos)) if last_down > last_up*2 and v_pos<0.1: decision = True if os.environ['DEBUG']=='ON': print('{:.10} try to catch the bottom days:{} vpos:{:.2f}'.format(str(date),since_days,v_pos)) if (last_down>0.06) \ or prev_down>0.25: #最后一次的下跌空间要够 if last_up<0.15 and last_down>0.2 and v_pos>0.2 and v_pos<0.4: fuzzy_range=0.05 decision = True if os.environ['DEBUG']=='ON':print('got it 2') if v_pos>0.45 or v_pos<0.2: fuzzy_range=0.025 if last_up>0.2 and last_down>0.1: fuzzy_range=0.02 decision = True if os.environ['DEBUG']=='ON':print('got it') else: #下跌幅度不够,往下看支撑位 if (last_down<0.1 and prev_down<0.25): fuzzy_range=0.01 support_points = points[(points.direction=='up')] support_points = support_points.sort_values(by=["num_date"], ascending=False) while(support_points.shape[0]>0): point = support_points['price'].iloc[0] num_date = support_points['num_date'].iloc[0] date = mdates.num2date(num_date) support_since_days = int(dataset['num_date'].iloc[-1] - num_date) support_points = support_points[support_points.price<point].sort_values(by=["num_date"], ascending=False) if os.environ['DEBUG']=='ON': print("{:.10}\t p:{:.2f}\t scope: {:.2f} - {:.2f} since {} days\t last_down:{:.2f}/{:.2f} fuzzy_range:{:.2f}/{:.2f}".format(str(date), price, point*(1-fuzzy_range_low), point*(1+fuzzy_range),support_since_days,last_down,prev_down,fuzzy_range, fuzzy_range_low )) if (point*(1+fuzzy_range) > price and point*(1-fuzzy_range_low) < price) \ or (point*(1+fuzzy_range) > low and point*(1-fuzzy_range_low) < low): if support_since_days<60 : buy_signal_count +=1 if os.environ['DEBUG']=='ON': print ("^ signal at {} days ago".format(support_since_days)) break pos += 1 if buy_signal_count>0: # if subset['close'][-5:].min()*0.99 < low: decision = True # 说明下跌无力 if 
(last_down<0.01 and v_pos<0.2): decision = True if (last_down>0.25 and v_pos<0.1): decision = True # 阴线反包赶紧扔 if dataset['change'].iloc[-3]>0.01 and \ (prev_change>0.01 or change<-0.03 ) and \ (change <0 and prev_change>0) and \ (open > prev_close) and \ (price < prev_open): if os.environ['DEBUG']=='ON': print("{:.10} 阴线反包 不能买".format( str(dataset.iloc[-1].name))) decision = False if (dataset['close'][-5:].max() - price)/dataset['close'][-5:].max() <= 0.035 \ and v_pos <0.3 and since_days>=3: if os.environ['DEBUG']=='ON': print("{:.10} not droping so much".format( str(dataset.iloc[-1].name))) decision = True if since_days==1 and v_pos<0.35 and change<-0.06: decision = True if decision == True and since_days==1 and change < -0.09: decision = False if os.environ['DEBUG']=='ON': print('{:.10}\t buy: {} \tsignal: {} \tdown: {:.3f}/{:.3f} \tup:{:.3f}\t v_pos:{:.2f}\t d:{}\tdays:{}'\ .format(str(subset.iloc[-1].name), decision,buy_signal_count,last_down,prev_down,last_up,v_pos,points['direction'].iloc[-2],since_days)) if points['direction'].iloc[-2]=='up': last_down = (points['price'].iloc[-3] - points['price'].iloc[-2]) / points['price'].iloc[-3] last_up = (points['price'].iloc[-1] - points['price'].iloc[-2]) / points['price'].iloc[-2] if bottom_points.shape[0]>=2 \ and (bottom_points['price'].iloc[-2] < bottom_points['price'].iloc[-1] ) \ and v_pos < 0.4 and last_up<0.03: decision = True # 阳线反包 追着买入 if open < price*1.005 \ and prev_open > prev_close*1.005 \ and open < prev_close \ and price > prev_open \ and close > open*1.025 \ and change<0.07 and last_up<0.15: if os.environ['DEBUG']=='ON': print('Grow line hugging down line') decision = True # 前面是大绿柱 两根阳线收复绿柱 80% if dataset['change'].iloc[-3]<-0.04 \ and prev_change < abs(dataset['change'].iloc[-3]) \ and change+prev_change > abs(dataset['change'].iloc[-3])*0.8: if os.environ['DEBUG']=='ON': print('Recovered big green bar') decision = True if os.environ['DEBUG']=='ON': print('{:.10}\t buy: {} \tsignal: {} \tdown: {:.3f}/000 \tup:{:.3f}\t v_pos:{:.2f}\t d:{}'\ .format(str(subset.iloc[-1].name), decision,buy_signal_count,last_down,last_up,v_pos,points['direction'].iloc[-2])) max_drop = (dataset['high'][-240:].max() - low )/dataset['high'][-240:].max() if max_drop > 0.58 and price>open: if os.environ['DEBUG']=='ON': print("240 max_drop:",max_drop) decision = True if max_drop > 0.65 and price<open: if os.environ['DEBUG']=='ON': print("240 65%off max_drop:",max_drop) decision = True max_drop = (dataset['high'][-60:].max() - low )/dataset['high'][-60:].max() if max_drop > 0.48 and price>open and last_up<0.1: if os.environ['DEBUG']=='ON': print("60 max_drop:",max_drop) decision = True # 判断是否应该忽略这次购买信号 if decision == True: # 忽略 比如箱体横盘太久了 if bottom_points.shape[0]>=2 \ and ((bottom_points['price'].iloc[-2] > bottom_points['price'].iloc[-1] ) \ and (top_points['price'].iloc[-2] > top_points['price'].iloc[-1])) \ and v_pos > 0.3: if os.environ['DEBUG']=='ON': print('Ignore Buy decision - down trend') decision = False # 忽略 向下有跳空 if open < price*1.005 \ and price < prev_close \ and prev_change < -0.05: if os.environ['DEBUG']=='ON': print('Ignore Buy decision - jump down') decision = False if open > price*1.005 \ and prev_close*0.99 > open \ and (prev_change+change) < -0.05: if os.environ['DEBUG']=='ON': print('Ignore Buy decision - jump down v2') decision = False # 忽略下跌幅度不够 if v_pos == 0 and since_days > 10 and last_down < 0.25: if os.environ['DEBUG']=='ON': print('Ignore Buy decision - Not droping enough yet') decision = False # 不跟跌停 if abs((price - 
open) /open) > 0.07: if os.environ['DEBUG']=='ON': print('Ignore Buy decision - After big drop', (price - open) /open) decision = False # 跌得太多 反弹太小 if change < 0.02 \ and (prev_change<0 and dataset['change'].iloc[-3]<0) \ and abs(prev_change+dataset['change'].iloc[-3]) > 0.075: if os.environ['DEBUG']=='ON': print('Ignore Buy decision - recover to little') decision = False # 两阴夹一阳 先别买 if change < -0.03 \ and prev_change < 0.03 \ and dataset['change'].iloc[-3] < -0.03: if os.environ['DEBUG']=='ON': print('Ignore Buy decision - 2 black bar hugging one red bar') decision = False # 阴线孕育阴线 if prev_change<-0.02 and change>0 \ and prev_close < open and prev_open > close \ and open > close: if os.environ['DEBUG']=='ON': print("black bar contains black bar YunXian") decision = False # 判断是否阴线孕育阳线 if prev_change<-0.02 and change>0 \ and prev_close < open and prev_open > close \ and close > open *1.01: if os.environ['DEBUG']=='ON': print("black bar contains red bar YunXian") decision = True # 按振幅判断, 如果后3日振幅相比前7日振幅扩大 并且当日是3日最低 short_his = dataset[-7:].copy() short_his['amp'] = short_his['high'] - short_his['low'] last_amp = short_his['amp'][-3:].mean() prev_amp = short_his['amp'][:-3].mean() if last_amp > prev_amp*2 \ and price == short_his['close'].min() \ and v_pos > 0.4: if os.environ['DEBUG']=='ON': print('double amp in down trend') decision = True return decision
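# The os.environ['DEBUG'] checks above raise KeyError when the variable is
# unset; a safer pattern (a sketch, not from the source) reads it once with
# a default and tests a boolean:
DEBUG = os.environ.get('DEBUG', 'OFF') == 'ON'
if DEBUG:
    print('debug output enabled')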
def make_frame(k): ''' loop each frame in multiprocessing ''' if not black: fig=plt.figure(1, figsize=(19.2,10.8), dpi=100) #full hd #fig=plt.figure(1, figsize=(19.2*2,10.8*2), dpi=100) #4k ax = plt.subplot2grid((5,2), (0, 0), rowspan=5, projection='polar') backcolor='black' psp_color='black' bepi_color='blue' solo_color='green' if black: fig=plt.figure(1, figsize=(19.9,11), dpi=100, facecolor='black', edgecolor='black') ax = plt.subplot(121,projection='polar',facecolor='black') #ax = plt.subplot2grid((5,2), (0, 0), rowspan=5, projection='polar') backcolor='white' psp_color='white' bepi_color='skyblue' solo_color='springgreen' sta_color='salmon' frame_time_str=str(mdates.num2date(frame_time_num+k*res_in_days)) print( 'current frame_time_num', frame_time_str, ' ',k) #these have their own times dct=frame_time_num+k*res_in_days-psp.time psp_timeind=np.argmin(abs(dct)) dct=frame_time_num+k*res_in_days-bepi.time bepi_timeind=np.argmin(abs(dct)) dct=frame_time_num+k*res_in_days-solo.time solo_timeind=np.argmin(abs(dct)) #all same times dct=frame_time_num+k*res_in_days-earth.time earth_timeind=np.argmin(abs(dct)) #plot all positions including text R lon lat for some #white background if not black: ax.scatter(venus.lon[earth_timeind], venus.r[earth_timeind]*np.cos(venus.lat[earth_timeind]), s=symsize_planet, c='orange', alpha=1,lw=0,zorder=3) ax.scatter(mercury.lon[earth_timeind], mercury.r[earth_timeind]*np.cos(mercury.lat[earth_timeind]), s=symsize_planet, c='dimgrey', alpha=1,lw=0,zorder=3) ax.scatter(earth.lon[earth_timeind], earth.r[earth_timeind]*np.cos(earth.lat[earth_timeind]), s=symsize_planet, c='mediumseagreen', alpha=1,lw=0,zorder=3) ax.scatter(sta.lon[earth_timeind], sta.r[earth_timeind]*np.cos(sta.lat[earth_timeind]), s=symsize_spacecraft, c='red', marker='s', alpha=1,lw=0,zorder=3) ax.scatter(mars.lon[earth_timeind], mars.r[earth_timeind]*np.cos(mars.lat[earth_timeind]), s=symsize_planet, c='orangered', alpha=1,lw=0,zorder=3) plt.figtext(0.95,0.75,'PSP ', color='black', ha='center',fontsize=fsize+3) plt.figtext(0.95,0.5,'Wind', color='mediumseagreen', ha='center',fontsize=fsize+3) plt.figtext(0.95,0.25,'STEREO-A', color='red', ha='center',fontsize=fsize+3) ''' plt.figtext(0.9,0.9,'Mercury', color='dimgrey', ha='center',fontsize=fsize+5) plt.figtext(0.9 ,0.8,'Venus', color='orange', ha='center',fontsize=fsize+5) plt.figtext(0.9,0.7,'Earth', color='mediumseagreen', ha='center',fontsize=fsize+5) #plt.figtext(0.9,0.7,'Mars', color='orangered', ha='center',fontsize=fsize+5) plt.figtext(0.9,0.6,'STEREO-A', color='red', ha='center',fontsize=fsize+5) plt.figtext(0.9,0.5,'Parker Solar Probe', color='black', ha='center',fontsize=fsize+5) plt.figtext(0.9,0.4,'Bepi Colombo', color='blue', ha='center',fontsize=fsize+5) plt.figtext(0.9,0.3,'Solar Orbiter', color='green', ha='center',fontsize=fsize+5) ''' #black background if black: ax.scatter(venus.lon[earth_timeind], venus.r[earth_timeind]*np.cos(venus.lat[earth_timeind]), s=symsize_planet, c='orange', alpha=1,lw=0,zorder=3) ax.scatter(mercury.lon[earth_timeind], mercury.r[earth_timeind]*np.cos(mercury.lat[earth_timeind]), s=symsize_planet, c='grey', alpha=1,lw=0,zorder=3) ax.scatter(earth.lon[earth_timeind], earth.r[earth_timeind]*np.cos(earth.lat[earth_timeind]), s=symsize_planet, c='mediumseagreen', alpha=1,lw=0,zorder=3) ax.scatter(sta.lon[earth_timeind], sta.r[earth_timeind]*np.cos(sta.lat[earth_timeind]), s=symsize_spacecraft, c=sta_color, marker='s', alpha=1,lw=0,zorder=3) #ax.scatter(mars.lon[earth_timeind], 
mars.r[earth_timeind]*np.cos(mars.lat[earth_timeind]), s=symsize_planet, c='orangered', alpha=1,lw=0,zorder=3) plt.figtext(0.9,0.9,'Mercury', color='grey', ha='center',fontsize=fsize+5) plt.figtext(0.9,0.8,'Venus', color='orange', ha='center',fontsize=fsize+5) plt.figtext(0.9,0.7,'Earth', color='mediumseagreen', ha='center',fontsize=fsize+5) #plt.figtext(0.9,0.6,'Mars', color='orangered', ha='center',fontsize=fsize+5) plt.figtext(0.9,0.6,'STEREO-A', color=sta_color, ha='center',fontsize=fsize+5) plt.figtext(0.9,0.5,'Parker Solar Probe', color=psp_color, ha='center',fontsize=fsize+5) plt.figtext(0.9,0.4,'Bepi Colombo', color=bepi_color, ha='center',fontsize=fsize+5) plt.figtext(0.9,0.3,'Solar Orbiter', color=solo_color, ha='center',fontsize=fsize+5) #positions text f10=plt.figtext(0.01,0.93,' R lon lat', fontsize=fsize+2, ha='left',color=backcolor) if frame=='HEEQ': earth_text='Earth: '+str(f'{earth.r[earth_timeind]:6.2f}')+str(f'{0.0:8.1f}')+str(f'{np.rad2deg(earth.lat[earth_timeind]):8.1f}') else: earth_text='Earth: '+str(f'{earth.r[earth_timeind]:6.2f}')+str(f'{np.rad2deg(earth.lon[earth_timeind]):8.1f}')+str(f'{np.rad2deg(earth.lat[earth_timeind]):8.1f}') mars_text='Mars: '+str(f'{mars.r[earth_timeind]:6.2f}')+str(f'{np.rad2deg(mars.lon[earth_timeind]):8.1f}')+str(f'{np.rad2deg(mars.lat[earth_timeind]):8.1f}') sta_text='STA: '+str(f'{sta.r[earth_timeind]:6.2f}')+str(f'{np.rad2deg(sta.lon[earth_timeind]):8.1f}')+str(f'{np.rad2deg(sta.lat[earth_timeind]):8.1f}') #position and text if psp_timeind > 0: #plot trajectorie ax.scatter(psp.lon[psp_timeind], psp.r[psp_timeind]*np.cos(psp.lat[psp_timeind]), s=symsize_spacecraft, c=psp_color, marker='s', alpha=1,lw=0,zorder=3) #plot positiona as text psp_text='PSP: '+str(f'{psp.r[psp_timeind]:6.2f}')+str(f'{np.rad2deg(psp.lon[psp_timeind]):8.1f}')+str(f'{np.rad2deg(psp.lat[psp_timeind]):8.1f}') f5=plt.figtext(0.01,0.78,psp_text, fontsize=fsize, ha='left',color=psp_color) if plot_orbit: fadestart=psp_timeind-fadeind if fadestart < 0: fadestart=0 ax.plot(psp.lon[fadestart:psp_timeind+fadeind], psp.r[fadestart:psp_timeind+fadeind]*np.cos(psp.lat[fadestart:psp_timeind+fadeind]), c=psp_color, alpha=0.6,lw=1,zorder=3) if bepi_timeind > 0: ax.scatter(bepi.lon[bepi_timeind], bepi.r[bepi_timeind]*np.cos(bepi.lat[bepi_timeind]), s=symsize_spacecraft, c=bepi_color, marker='s', alpha=1,lw=0,zorder=3) bepi_text='Bepi: '+str(f'{bepi.r[bepi_timeind]:6.2f}')+str(f'{np.rad2deg(bepi.lon[bepi_timeind]):8.1f}')+str(f'{np.rad2deg(bepi.lat[bepi_timeind]):8.1f}') f6=plt.figtext(0.01,0.74,bepi_text, fontsize=fsize, ha='left',color=bepi_color) if plot_orbit: fadestart=bepi_timeind-fadeind if fadestart < 0: fadestart=0 ax.plot(bepi.lon[fadestart:bepi_timeind+fadeind], bepi.r[fadestart:bepi_timeind+fadeind]*np.cos(bepi.lat[fadestart:bepi_timeind+fadeind]), c=bepi_color, alpha=0.6,lw=1,zorder=3) if solo_timeind > 0: ax.scatter(solo.lon[solo_timeind], solo.r[solo_timeind]*np.cos(solo.lat[solo_timeind]), s=symsize_spacecraft, c=solo_color, marker='s', alpha=1,lw=0,zorder=3) solo_text='SolO: '+str(f'{solo.r[solo_timeind]:6.2f}')+str(f'{np.rad2deg(solo.lon[solo_timeind]):8.1f}')+str(f'{np.rad2deg(solo.lat[solo_timeind]):8.1f}') f7=plt.figtext(0.01,0.7,solo_text, fontsize=fsize, ha='left',color=solo_color) if plot_orbit: fadestart=solo_timeind-fadeind if fadestart < 0: fadestart=0 ax.plot(solo.lon[fadestart:solo_timeind+fadeind], solo.r[fadestart:solo_timeind+fadeind]*np.cos(solo.lat[fadestart:solo_timeind+fadeind]), c=solo_color, alpha=0.6,lw=1,zorder=3) 
f10=plt.figtext(0.01,0.9,earth_text, fontsize=fsize, ha='left',color='mediumseagreen') f9=plt.figtext(0.01,0.86,mars_text, fontsize=fsize, ha='left',color='orangered') f8=plt.figtext(0.01,0.82,sta_text, fontsize=fsize, ha='left',color='red') #parker spiral if plot_parker: for q in np.arange(0,12): omega=2*np.pi/(sun_rot*60*60*24) #solar rotation in seconds v=400/AUkm #km/s r0=695000/AUkm r=v/omega*theta+r0*7 if not black: ax.plot(-theta+np.deg2rad(0+(360/24.47)*res_in_days*k+360/12*q), r, alpha=0.4, lw=0.5,color='grey',zorder=2) if black: ax.plot(-theta+np.deg2rad(0+(360/24.47)*res_in_days*k+360/12*q), r, alpha=0.7, lw=0.7,color='grey',zorder=2) #set axes and grid ax.set_theta_zero_location('E') #plt.thetagrids(range(0,360,45),(u'0\u00b0 '+frame+' longitude',u'45\u00b0',u'90\u00b0',u'135\u00b0',u'+/- 180\u00b0',u'- 135\u00b0',u'- 90\u00b0',u'- 45\u00b0'), ha='right', fmt='%d',fontsize=fsize-1,color=backcolor, alpha=0.9) plt.thetagrids(range(0,360,45),(u'0\u00b0',u'45\u00b0',u'90\u00b0',u'135\u00b0',u'+/- 180\u00b0',u'- 135\u00b0',u'- 90\u00b0',u'- 45\u00b0'), ha='center', fmt='%d',fontsize=fsize-1,color=backcolor, alpha=0.9,zorder=4) #plt.rgrids((0.10,0.39,0.72,1.00,1.52),('0.10','0.39','0.72','1.0','1.52 AU'),angle=125, fontsize=fsize,alpha=0.9, color=backcolor) plt.rgrids((0.1,0.3,0.5,0.7,1.0),('0.10','0.3','0.5','0.7','1.0 AU'),angle=125, fontsize=fsize-3,alpha=0.5, color=backcolor) #ax.set_ylim(0, 1.75) #with Mars ax.set_ylim(0, 1.2) #Sun ax.scatter(0,0,s=100,c='yellow',alpha=1, edgecolors='black', linewidth=0.3) ############################ IN SITU DATA time_now=frame_time_num+k*res_in_days #cut put current time window in data pindex1=np.where(p_time_num > time_now-days_window)[0][0] pindex2=np.where(p_time_num > time_now+days_window)[0][0] p=p1[pindex1:pindex2] sindex1=np.where(s_time_num > time_now-days_window)[0][0] sindex2=np.where(s_time_num > time_now+days_window)[0][0] s=s1[sindex1:sindex2] windex1=np.where(w_time_num > time_now-days_window)[0][0] windex2=np.where(w_time_num > time_now+days_window)[0][0] w=w1[windex1:windex2] #### PSP ax2 = plt.subplot2grid((6,2), (0, 1)) ''' plt.plot_date(p_tm,pbx,'-r',label='BR',linewidth=0.5) plt.plot_date(p_tm,pby,'-g',label='BT',linewidth=0.5) plt.plot_date(p_tm,pbz,'-b',label='BN',linewidth=0.5) plt.plot_date(p_tm,pbt,'-k',label='Btotal',lw=0.5) ''' plt.plot_date(p.time,p.bx,'-r',label='BR',linewidth=0.5) plt.plot_date(p.time,p.by,'-g',label='BT',linewidth=0.5) plt.plot_date(p.time,p.bz,'-b',label='BN',linewidth=0.5) plt.plot_date(p.time,p.bt,'-k',label='Btotal',lw=0.5) ax2.plot_date([time_now,time_now], [-100,100],'-k', lw=0.5, alpha=0.8) ax2.set_ylabel('B [nT]',fontsize=fsize-1) ax2.xaxis.set_major_formatter( matplotlib.dates.DateFormatter('%b-%d') ) ax2.set_xlim(time_now-days_window,time_now+days_window) plt.ylim((-110, 110)) ax2.set_xticklabels([]) plt.yticks(fontsize=fsize-1) ax3 = plt.subplot2grid((6,2), (1, 1)) #plt.plot_date(p_tp,pv,'-k',label='V',linewidth=0.5) plt.plot_date(p.time,p.vt,'-k',label='V',linewidth=0.7) ax3.set_xlim(time_now-days_window,time_now+days_window) ax3.plot_date([time_now,time_now], [0,800],'-k', lw=0.5, alpha=0.8) ax3.xaxis.set_major_formatter( matplotlib.dates.DateFormatter('%b-%d') ) plt.ylabel('V [km/s]',fontsize=fsize-1) plt.ylim((240, 810)) plt.yticks(fontsize=fsize-1) ax3.set_xticklabels([]) ########## Wind ax4 = plt.subplot2grid((6,2), (2, 1)) #plt.plot_date(w_tm,wbx,'-r',label='BR',linewidth=0.5) #plt.plot_date(w_tm,wby,'-g',label='BT',linewidth=0.5) 
#plt.plot_date(w_tm,wbz,'-b',label='BN',linewidth=0.5) #plt.plot_date(w_tm,wbt,'-k',label='Btotal',lw=0.5) plt.plot_date(w.time,w.bx,'-r',label='BR',linewidth=0.5) plt.plot_date(w.time,w.by,'-g',label='BT',linewidth=0.5) plt.plot_date(w.time,w.bz,'-b',label='BN',linewidth=0.5) plt.plot_date(w.time,w.bt,'-k',label='Btotal',lw=0.5) ax4.plot_date([time_now,time_now], [-100,100],'-k', lw=0.5, alpha=0.8) ax4.set_ylabel('B [nT]',fontsize=fsize-1) ax4.xaxis.set_major_formatter( matplotlib.dates.DateFormatter('%b-%d') ) ax4.set_xlim(time_now-days_window,time_now+days_window) plt.ylim((-18, 18)) plt.yticks(fontsize=fsize-1) ax4.set_xticklabels([]) ax5 = plt.subplot2grid((6,2), (3, 1)) plt.plot_date(w.time,w.vt,'-k',label='V',linewidth=0.7) #plt.plot_date(w_tp,wv,'-k',label='V',linewidth=0.5) ax5.plot_date([time_now,time_now], [0,800],'-k', lw=0.5, alpha=0.8) ax5.set_xlim(time_now-days_window,time_now+days_window) plt.ylabel('V [km/s]',fontsize=fsize-1) plt.ylim((240, 810)) plt.yticks(fontsize=fsize-1) ax5.set_xticklabels([]) ##### STEREO-A ax6 = plt.subplot2grid((6,2), (4, 1)) #plt.plot_date(s_tm,sbx,'-r',label='BR',linewidth=0.5) #plt.plot_date(s_tm,sby,'-g',label='BT',linewidth=0.5) #plt.plot_date(s_tm,sbz,'-b',label='BN',linewidth=0.5) #plt.plot_date(s_tm,sbt,'-k',label='Btotal') plt.plot_date(s.time,s.bx,'-r',label='BR',linewidth=0.5) plt.plot_date(s.time,s.by,'-g',label='BT',linewidth=0.5) plt.plot_date(s.time,s.bz,'-b',label='BN',linewidth=0.5) plt.plot_date(s.time,s.bt,'-k',label='Btotal',linewidth=0.5) ax6.set_ylabel('B [nT]',fontsize=fsize-1) ax6.plot_date([time_now,time_now], [-100,100],'-k', lw=0.5, alpha=0.8) #ax6.xaxis.set_major_formatter( matplotlib.dates.DateFormatter('%b-%d') ) ax6.set_xlim(time_now-days_window,time_now+days_window) ax6.set_xticklabels([]) plt.yticks(fontsize=fsize-1) plt.tick_params( axis='x', labelbottom='off') plt.ylim((-18, 18)) ax7 = plt.subplot2grid((6,2), (5, 1)) plt.plot_date(s.time,s.vt,'-k',label='V',linewidth=0.7) ax7.plot_date([time_now,time_now], [0,800],'-k', lw=0.5, alpha=0.8) ax7.set_xlim(time_now-days_window,time_now+days_window) ax7.xaxis.set_major_formatter( matplotlib.dates.DateFormatter('%b-%d') ) plt.ylabel('V [km/s]',fontsize=fsize-1) plt.tick_params(axis='x', labelbottom='off') plt.ylim((240, 810)) plt.yticks(fontsize=fsize-1) plt.xticks(fontsize=fsize) ############################ #plot text for date extra so it does not move #year f1=plt.figtext(0.45,0.93,frame_time_str[0:4], ha='center',color=backcolor,fontsize=fsize+6) #month f2=plt.figtext(0.45+0.04,0.93,frame_time_str[5:7], ha='center',color=backcolor,fontsize=fsize+6) #day f3=plt.figtext(0.45+0.08,0.93,frame_time_str[8:10], ha='center',color=backcolor,fontsize=fsize+6) #hours f4=plt.figtext(0.45+0.12,0.93,frame_time_str[11:13], ha='center',color=backcolor,fontsize=fsize+6) plt.figtext(0.02, 0.02,'Spacecraft trajectories '+frame+' 2D projection', fontsize=fsize-1, ha='left',color=backcolor) plt.figtext(0.35,0.02,'――― trajectory from - 60 days to + 60 days', color='black', ha='center',fontsize=fsize-3) #signature plt.figtext(0.99,0.01/2,'Möstl, Weiss, Bailey / Helio4Cast', fontsize=fsize-4, ha='right',color=backcolor) #save figure framestr = '%05i' % (k) filename=outputdirectory+'/pos_anim_'+framestr+'.jpg' if k==0: print(filename) plt.savefig(filename,dpi=100,facecolor=fig.get_facecolor(), edgecolor='none') #plt.clf() plt.close('all')
    with open(modelFile, 'rb') as f:
        (rmsError120Z, rmsBest, rmsClimatology, biasError120Z,
         biasBest, biasClimatology, dateNum) = pickle.load(f)
    with open(modelFile2, 'rb') as f:
        (BESTESTIMATION, CLIMATOLOGY, FORECAST,
         DEPTH, TEMPERATURE, SALINITY) = pickle.load(f)
else:
    di = 0
    for datei in datenum:
        print(datei)
        dt0 = datetime.strptime(datei, "%Y-%m-%d %H:%M:%S")
        dt1 = mdates.date2num(dt0)
        dt2 = mdates.num2date(dt1)
        # print(dt0, dt2)
        taxis.append(dt1)
        dayStr = str(dt0.year) + str(dt0.month).rjust(2, '0') + str(dt0.day).rjust(2, '0')
        print(dayStr)
        fname, fileExist = getFname(fCls4path, dayStr, modelStr)
        if fileExist:
            (novaIn, climatology, persistence, forecast,
             bestEstimate, dayObs, depth) = readfromCls4file(fname, dayStr)
            print(type(dayObs), np.shape(dayObs))
def GOFS_RTOFS_vs_Argo_floats(lon_forec_track, lat_forec_track,
                              lon_forec_cone, lat_forec_cone,
                              lon_best_track, lat_best_track,
                              lon_lim, lat_lim, folder_fig):
    #%% User input
    # GOFS 3.1 output model location
    url_GOFS_ts = 'http://tds.hycom.org/thredds/dodsC/GLBy0.08/expt_93.0/ts3z'

    # RTOFS files
    folder_RTOFS = '/home/coolgroup/RTOFS/forecasts/domains/hurricanes/RTOFS_6hourly_North_Atlantic/'

    nc_files_RTOFS = ['rtofs_glo_3dz_f006_6hrly_hvr_US_east.nc',
                      'rtofs_glo_3dz_f012_6hrly_hvr_US_east.nc',
                      'rtofs_glo_3dz_f018_6hrly_hvr_US_east.nc',
                      'rtofs_glo_3dz_f024_6hrly_hvr_US_east.nc']

    # COPERNICUS MARINE ENVIRONMENT MONITORING SERVICE (CMEMS)
    url_cmems = 'http://nrt.cmems-du.eu/motu-web/Motu'
    service_id = 'GLOBAL_ANALYSIS_FORECAST_PHY_001_024-TDS'
    product_id = 'global-analysis-forecast-phy-001-024'
    depth_min = '0.493'
    out_dir = '/home/aristizabal/crontab_jobs'

    # Bathymetry file
    #bath_file = '/Users/aristizabal/Desktop/MARACOOS_project/Maria_scripts/nc_files/GEBCO_2014_2D_-100.0_0.0_-60.0_45.0.nc'
    bath_file = '/home/aristizabal/bathymetry_files/GEBCO_2014_2D_-100.0_0.0_-10.0_50.0.nc'

    # Argo floats
    url_Argo = 'http://www.ifremer.fr/erddap'

    #%%
    from matplotlib import pyplot as plt
    import numpy as np
    import xarray as xr
    import netCDF4
    from datetime import datetime, timedelta
    import cmocean
    import matplotlib.dates as mdates
    from erddapy import ERDDAP
    import pandas as pd
    import os

    # Do not produce figures on screen
    plt.switch_backend('agg')

    # Increase fontsize of labels globally
    plt.rc('xtick', labelsize=14)
    plt.rc('ytick', labelsize=14)
    plt.rc('legend', fontsize=14)

    #%% Reading bathymetry data
    ncbath = xr.open_dataset(bath_file)
    bath_lat = ncbath.variables['lat'][:]
    bath_lon = ncbath.variables['lon'][:]
    bath_elev = ncbath.variables['elevation'][:]

    oklatbath = np.logical_and(bath_lat >= lat_lim[0], bath_lat <= lat_lim[-1])
    oklonbath = np.logical_and(bath_lon >= lon_lim[0], bath_lon <= lon_lim[-1])

    bath_latsub = bath_lat[oklatbath]
    bath_lonsub = bath_lon[oklonbath]
    bath_elevs = bath_elev[oklatbath, :]
    bath_elevsub = bath_elevs[:, oklonbath]

    #%% Get time bounds for current day
    #ti = datetime.today()
    ti = datetime.today() - timedelta(1) - timedelta(hours=6)
    tini = datetime(ti.year, ti.month, ti.day)
    te = ti + timedelta(2)
    tend = datetime(te.year, te.month, te.day)

    #%% Look for Argo datasets
    e = ERDDAP(server=url_Argo)

    # Grab every dataset available
    #datasets = pd.read_csv(e.get_search_url(response='csv', search_for='all'))

    kw = {
        'min_lon': lon_lim[0],
        'max_lon': lon_lim[1],
        'min_lat': lat_lim[0],
        'max_lat': lat_lim[1],
        'min_time': str(tini),
        'max_time': str(tend),
    }

    search_url = e.get_search_url(response='csv', **kw)

    # Grab the results
    search = pd.read_csv(search_url)

    # Extract the IDs
    dataset = search['Dataset ID'].values

    msg = 'Found {} Datasets:\n\n{}'.format
    print(msg(len(dataset), '\n'.join(dataset)))

    dataset_type = dataset[0]

    constraints = {
        'time>=': str(tini),
        'time<=': str(tend),
        'latitude>=': lat_lim[0],
        'latitude<=': lat_lim[1],
        'longitude>=': lon_lim[0],
        'longitude<=': lon_lim[1],
    }

    variables = [
        'platform_number',
        'time',
        'pres',
        'longitude',
        'latitude',
        'temp',
        'psal',
    ]

    e = ERDDAP(server=url_Argo, protocol='tabledap', response='nc')
    e.dataset_id = dataset_type
    e.constraints = constraints
    e.variables = variables

    print(e.get_download_url())

    df = e.to_pandas(
        parse_dates=True,
        skiprows=(1,)  # units information can be dropped
    ).dropna()

    argo_ids = np.asarray(df['platform_number'])
    argo_times = np.asarray(df['time (UTC)'])
    argo_press = np.asarray(df['pres (decibar)'])
    argo_lons = np.asarray(df['longitude (degrees_east)'])
    argo_lats = np.asarray(df['latitude (degrees_north)'])
    argo_temps = np.asarray(df['temp (degree_Celsius)'])
    argo_salts = np.asarray(df['psal (PSU)'])

    #%% GOFS 3.1
    try:
        GOFS_ts = xr.open_dataset(url_GOFS_ts, decode_times=False)
        lt_GOFS = np.asarray(GOFS_ts['lat'][:])
        ln_GOFS = np.asarray(GOFS_ts['lon'][:])
        tt = GOFS_ts['time']
        t_GOFS = netCDF4.num2date(tt[:], tt.units)
        depth_GOFS = np.asarray(GOFS_ts['depth'][:])
    except Exception as err:
        print(err)
        GOFS_ts = np.nan
        lt_GOFS = np.nan
        ln_GOFS = np.nan
        depth_GOFS = np.nan
        t_GOFS = ti

    #%% Map Argo floats
    lev = np.arange(-9000, 9100, 100)
    plt.figure()
    plt.contourf(bath_lonsub, bath_latsub, bath_elevsub, lev,
                 cmap=cmocean.cm.topo)
    plt.plot(lon_forec_track, lat_forec_track, '.-', color='gold')
    plt.plot(lon_forec_cone, lat_forec_cone, '.-b', markersize=1)
    plt.plot(lon_best_track, lat_best_track, 'or', markersize=3)

    argo_idd = np.unique(argo_ids)
    for i, id in enumerate(argo_idd):
        okind = np.where(argo_ids == id)[0]
        plt.plot(np.unique(argo_lons[okind]), np.unique(argo_lats[okind]),
                 's', color='darkorange', markersize=5, markeredgecolor='k')

    plt.title('Argo Floats ' + str(tini)[0:13] + '-' + str(tend)[0:13],
              fontsize=16)
    plt.axis('scaled')
    plt.xlim(lon_lim[0], lon_lim[1])
    plt.ylim(lat_lim[0], lat_lim[1])

    file = folder_fig + 'ARGO_lat_lon'
    #file = folder_fig + 'ARGO_lat_lon_' + str(np.unique(argo_times)[0])[0:10]
    plt.savefig(file, bbox_inches='tight', pad_inches=0.1)

    #%% Figure argo float vs GOFS and vs RTOFS
    argo_idd = np.unique(argo_ids)

    for i, id in enumerate(argo_idd):
        print(id)
        okind = np.where(argo_ids == id)[0]
        argo_time = np.asarray([
            datetime.strptime(t, '%Y-%m-%dT%H:%M:%SZ')
            for t in argo_times[okind]
        ])
        argo_lon = argo_lons[okind]
        argo_lat = argo_lats[okind]
        argo_pres = argo_press[okind]
        argo_temp = argo_temps[okind]
        argo_salt = argo_salts[okind]

        # GOFS
        print('Retrieving variables from GOFS')
        if isinstance(GOFS_ts, float):
            temp_GOFS = np.nan
            salt_GOFS = np.nan
        else:
            #oktt_GOFS = np.where(t_GOFS >= argo_time[0])[0][0]
            ttGOFS = np.asarray([
                datetime(t_GOFS[i].year, t_GOFS[i].month, t_GOFS[i].day,
                         t_GOFS[i].hour) for i in np.arange(len(t_GOFS))
            ])
            tstamp_GOFS = [mdates.date2num(ttGOFS[i])
                           for i in np.arange(len(ttGOFS))]
            oktt_GOFS = np.unique(
                np.round(
                    np.interp(mdates.date2num(argo_time[0]), tstamp_GOFS,
                              np.arange(len(tstamp_GOFS)))).astype(int))[0]
            oklat_GOFS = np.where(lt_GOFS >= argo_lat[0])[0][0]
            oklon_GOFS = np.where(ln_GOFS >= argo_lon[0] + 360)[0][0]
            temp_GOFS = np.asarray(GOFS_ts['water_temp'][oktt_GOFS, :,
                                                         oklat_GOFS, oklon_GOFS])
            salt_GOFS = np.asarray(GOFS_ts['salinity'][oktt_GOFS, :,
                                                       oklat_GOFS, oklon_GOFS])

        # RTOFS
        # Time window
        year = int(argo_time[0].year)
        month = int(argo_time[0].month)
        day = int(argo_time[0].day)
        tini = datetime(year, month, day)
        tend = tini + timedelta(days=1)

        # Read RTOFS grid and time
        print('Retrieving coordinates from RTOFS')
        # Zero-padded rtofs.YYYYMMDD folder name (equivalent to the original
        # nested if/else ladder over month and day)
        fol = 'rtofs.' + tini.strftime('%Y%m%d')

        ncRTOFS = xr.open_dataset(folder_RTOFS + fol + '/' + nc_files_RTOFS[0])
        latRTOFS = np.asarray(ncRTOFS.Latitude[:])
        lonRTOFS = np.asarray(ncRTOFS.Longitude[:])
        depth_RTOFS = np.asarray(ncRTOFS.Depth[:])

        tRTOFS = []
        for t in np.arange(len(nc_files_RTOFS)):
            ncRTOFS = xr.open_dataset(folder_RTOFS + fol + '/' + nc_files_RTOFS[t])
            tRTOFS.append(np.asarray(ncRTOFS.MT[:])[0])

        tRTOFS = np.asarray([mdates.num2date(mdates.date2num(tRTOFS[t]))
                             for t in np.arange(len(nc_files_RTOFS))])

        oktt_RTOFS = np.where(
            mdates.date2num(tRTOFS) >= mdates.date2num(argo_time[0]))[0][0]
        oklat_RTOFS = np.where(latRTOFS[:, 0] >= argo_lat[0])[0][0]
        oklon_RTOFS = np.where(lonRTOFS[0, :] >= argo_lon[0])[0][0]

        nc_file = folder_RTOFS + fol + '/' + nc_files_RTOFS[oktt_RTOFS]
        ncRTOFS = xr.open_dataset(nc_file)
        #time_RTOFS = tRTOFS[oktt_RTOFS]
        temp_RTOFS = np.asarray(ncRTOFS.variables['temperature'][0, :,
                                                                 oklat_RTOFS,
                                                                 oklon_RTOFS])
        salt_RTOFS = np.asarray(ncRTOFS.variables['salinity'][0, :,
                                                              oklat_RTOFS,
                                                              oklon_RTOFS])
        #lon_RTOFS = lonRTOFS[0, oklon_RTOFS]
        #lat_RTOFS = latRTOFS[oklat_RTOFS, 0]

        # Downloading and reading Copernicus output
        motuc = 'python -m motuclient --motu ' + url_cmems + \
            ' --service-id ' + service_id + \
            ' --product-id ' + product_id + \
            ' --longitude-min ' + str(argo_lon[0] - 2/12) + \
            ' --longitude-max ' + str(argo_lon[0] + 2/12) + \
            ' --latitude-min ' + str(argo_lat[0] - 2/12) + \
            ' --latitude-max ' + str(argo_lat[0] + 2/12) + \
            ' --date-min ' + '"' + str(tini - timedelta(0.5)) + '"' + \
            ' --date-max ' + '"' + str(tend + timedelta(0.5)) + '"' + \
            ' --depth-min ' + depth_min + \
            ' --depth-max ' + str(np.nanmax(argo_pres) + 1000) + \
            ' --variable ' + 'thetao' + ' ' + \
            ' --variable ' + 'so' + ' ' + \
            ' --out-dir ' + out_dir + \
            ' --out-name ' + str(id) + '.nc' + ' ' + \
            ' --user ' + 'maristizabalvar' + ' ' + \
            ' --pwd ' + 'MariaCMEMS2018'  # NOTE: hard-coded credentials; better read from a config file

        os.system(motuc)

        # Check if file was downloaded
        COP_file = out_dir + '/' + str(id) + '.nc'
        resp = os.system('ls ' + out_dir + '/' + str(id) + '.nc')
        if resp == 0:
            COP = xr.open_dataset(COP_file)
            latCOP = np.asarray(COP.latitude[:])
            lonCOP = np.asarray(COP.longitude[:])
            depth_COP = np.asarray(COP.depth[:])
            tCOP = np.asarray(mdates.num2date(mdates.date2num(COP.time[:])))

            oktimeCOP = np.where(
                mdates.date2num(tCOP) >= mdates.date2num(tini))[0][0]
            oklonCOP = np.where(lonCOP >= argo_lon[0])[0][0]
            oklatCOP = np.where(latCOP >= argo_lat[0])[0][0]

            temp_COP = np.asarray(COP.variables['thetao'][oktimeCOP, :,
                                                          oklatCOP, oklonCOP])
            salt_COP = np.asarray(COP.variables['so'][oktimeCOP, :,
                                                      oklatCOP, oklonCOP])
        else:
            # File did not download: fall back to NaN profiles so the plots
            # below still render (the original referenced the undefined COP
            # dataset here and would fail).
            depth_COP = np.empty(1)
            depth_COP[:] = np.nan
            temp_COP = np.empty(1)
            temp_COP[:] = np.nan
            salt_COP = np.empty(1)
            salt_COP[:] = np.nan

        # Figure temp
        plt.figure(figsize=(5, 6))
        plt.plot(argo_temp, -argo_pres, '.-', linewidth=2,
                 label='ARGO Float id ' + str(id))
        plt.plot(temp_GOFS, -depth_GOFS, '.-', linewidth=2,
                 label='GOFS 3.1', color='red')
        plt.plot(temp_RTOFS, -depth_RTOFS, '.-', linewidth=2,
                 label='RTOFS', color='g')
        plt.plot(temp_COP, -depth_COP, '.-', linewidth=2,
                 label='Copernicus', color='darkorchid')
        plt.ylim([-1000, 0])
        plt.title('Temperature Profile on ' + str(argo_time[0])[0:13] +
                  '\n [lon,lat] = [' + str(np.round(argo_lon[0], 3)) + ',' +
                  str(np.round(argo_lat[0], 3)) + ']', fontsize=16)
        plt.ylabel('Depth (m)', fontsize=14)
        plt.xlabel('$^oC$', fontsize=14)
        plt.legend(loc='lower right', fontsize=14)

        file = folder_fig + 'ARGO_vs_GOFS_RTOFS_COP_temp_' + str(id)
        plt.savefig(file, bbox_inches='tight', pad_inches=0.1)

        # Figure salt
        plt.figure(figsize=(5, 6))
        plt.plot(argo_salt, -argo_pres, '.-', linewidth=2,
                 label='ARGO Float id ' + str(id))
        plt.plot(salt_GOFS, -depth_GOFS, '.-', linewidth=2,
                 label='GOFS 3.1', color='red')
        plt.plot(salt_RTOFS, -depth_RTOFS, '.-', linewidth=2,
                 label='RTOFS', color='g')
        plt.plot(salt_COP, -depth_COP, '.-', linewidth=2,
                 label='Copernicus', color='darkorchid')
        plt.ylim([-1000, 0])
        plt.title('Salinity Profile on ' + str(argo_time[0])[0:13] +
                  '\n [lon,lat] = [' + str(np.round(argo_lon[0], 3)) + ',' +
                  str(np.round(argo_lat[0], 3)) + ']', fontsize=16)
        plt.ylabel('Depth (m)', fontsize=14)
        plt.legend(loc='lower right', fontsize=14)

        file = folder_fig + 'ARGO_vs_GOFS_RTOFS_COP_salt_' + str(id)
        plt.savefig(file, bbox_inches='tight', pad_inches=0.1)
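# Hedged usage sketch (not from the original source): a hypothetical call with
# made-up track coordinates and a western-Atlantic bounding box. The data URLs
# and folder paths inside the function are site-specific and must exist.
lon_track = [-75.0, -74.0, -73.0]
lat_track = [25.0, 26.5, 28.0]
GOFS_RTOFS_vs_Argo_floats(lon_track, lat_track,          # forecast track
                          lon_track, lat_track,          # forecast cone (placeholder)
                          lon_track, lat_track,          # best track (placeholder)
                          [-80.0, -60.0], [15.0, 35.0],  # lon/lat limits
                          '/tmp/figs/')                  # figure output folder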
# requires: import os, matplotlib.dates as mdates; find_turn_points is a helper
# defined elsewhere in this codebase
def should_sell(dataset):
    window = 120  # renamed from `len`, which shadowed the builtin
    decision = False
    subset = dataset[-window:]
    short_his = subset[-5:].copy()
    short_his['amp'] = short_his['high'] - short_his['low']
    ma_amp = short_his['amp'].mean()
    epsilon = ma_amp
    points = find_turn_points(subset, epsilon)

    if points.shape[0] < 4:
        return False
    if dataset.shape[0] <= 0:
        return False

    last_turn_pt = points['num_date'].iloc[-2]
    days_since_last_turnpoint = dataset[dataset.num_date > last_turn_pt].shape[0]

    price = subset['close'].iloc[-1]
    low = subset['low'].iloc[-1]
    open_price = subset['open'].iloc[-1]  # renamed from `open`, which shadowed the builtin
    v_pos = (price - subset['close'].min()) / (subset['close'].max() - subset['close'].min())

    if points['direction'].iloc[-2] == 'down':
        last_down = (points['price'].iloc[-2] - points['price'].iloc[-1]) / points['price'].iloc[-2]
        last_up = (points['price'].iloc[-2] - points['price'].iloc[-3]) / points['price'].iloc[-2]
        prev_down = (points['price'].iloc[-4] - points['price'].iloc[-3]) / points['price'].iloc[-4]
        if days_since_last_turnpoint <= 2 \
                and last_down > 0.04 \
                and v_pos > 0.2:
            if os.environ.get('DEBUG') == 'ON':
                print('sell it', days_since_last_turnpoint)
            decision = True

    # Early-exit logic: take profit before the trend turns
    if points['direction'].iloc[-2] == 'up':
        last_down = (points['price'].iloc[-3] - points['price'].iloc[-2]) / points['price'].iloc[-3]
        last_up = (points['price'].iloc[-1] - points['price'].iloc[-2]) / points['price'].iloc[-1]
        prev_down = 0
        fuzzy_range = 0.03
        fuzzy_range_low = 0.03
        sell_signal_count = 0
        pos = 1
        pressure_points = points[(points.direction == 'down')
                                 & (points.price < price * (1 + fuzzy_range))]
        while pos <= pressure_points.shape[0]:
            point = pressure_points['price'].iloc[-pos]
            num_date = pressure_points['num_date'].iloc[-pos]
            date = mdates.num2date(num_date)
            if os.environ.get('DEBUG') == 'ON':
                print("{:.10}\t p:{:.2f}\t scope: {:.2f} - {:.2f}\t last_down:{:.2f}\t up:{:.2f}".format(
                    str(date), price, point * (1 - fuzzy_range_low),
                    point * (1 + fuzzy_range), last_down, last_up))
            if (point * (1 + fuzzy_range) > price
                    and point * (1 - fuzzy_range_low) < price) \
                    and price > open_price:
                sell_signal_count += 1
                break
            pos += 1

        if sell_signal_count > 0:
            if 0.35 < v_pos < 0.5:
                if subset['close'][-5:].max() == price:
                    decision = True

        if os.environ.get('DEBUG') == 'ON':
            print('{:.10}\t sell: {} \tdown: {:.3f}/000 \tup:{:.3f}\t v_pos:{:.2f}\t d:{}'
                  .format(str(subset.iloc[-1].name), decision, last_down,
                          last_up, v_pos, points['direction'].iloc[-2]))

        last_amp = short_his['amp'][-3:].mean()
        prev_amp = short_his['amp'][:-3].mean()
        if last_amp > prev_amp * 2 \
                and open_price > price \
                and 0.2 < v_pos < 0.8:
            if os.environ.get('DEBUG') == 'ON':
                print('double amp in down trend')
            decision = True

    return decision
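# Hedged sketch (an assumption, NOT the original implementation): should_sell
# expects find_turn_points(subset, epsilon) to return a DataFrame with
# 'num_date', 'price' and 'direction' columns. A minimal zigzag-style stand-in,
# for experimentation only, could look like this.
import pandas as pd

def find_turn_points(subset, epsilon):
    # Record a pivot whenever price has moved by at least epsilon away from
    # the last pivot; 'direction' labels the move that starts at the pivot.
    points = []
    closes = subset['close'].values
    num_dates = subset['num_date'].values
    last_pivot, last_idx, direction = closes[0], 0, None
    for i in range(1, len(closes)):
        move = closes[i] - last_pivot
        if abs(move) >= epsilon:
            new_dir = 'up' if move > 0 else 'down'
            if new_dir != direction:
                points.append({'num_date': num_dates[last_idx],
                               'price': last_pivot,
                               'direction': new_dir})
                direction = new_dir
            last_pivot, last_idx = closes[i], i
    return pd.DataFrame(points)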
def set_normal_timestamp(self, date):
    date_object = num2date(date)
    self.normal_timestamp = date_object.strftime('%Y-%m-%d')
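# Minimal sketch of the conversion the method above performs, assuming `date`
# is a Matplotlib date number (num2date from matplotlib.dates, as elsewhere in
# this document); the timestamp is made up.
from matplotlib.dates import date2num, num2date
import datetime

num = date2num(datetime.datetime(2021, 6, 1, 15, 30))
print(num2date(num).strftime('%Y-%m-%d'))  # -> '2021-06-01'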
def collate_data(conversation, cumulative=False, bin_size=7,
                 group_messages=False,
                 start_date=mdates.num2date(730120),
                 end_date=mdates.num2date(1000000)):
    """
    Expects a WhatsApp chat log as a list item, and returns x data (time) and
    y data (messages sent, as a dict).
    Can return cumulative data for stack plots, or non-cumulative for bar/line graphs.
    For non-cumulative data, you can group messages in bins (e.g. bin_size=7 is
    messages grouped by week).
    If someone tends to send lots of short messages at once, you can set
    group_messages=True to treat them as one.
    """
    # Create x axis of time data
    if start_date > end_date:
        print("Error. start_date is after end_date")
        exit()

    if start_date == mdates.num2date(730120):  # if user hasn't specified a start date
        t1 = int(mdates.date2num(conversation.message_log[0]["date"].replace(hour=0, minute=0)))  # first day where messages were sent
    else:
        t1 = int(mdates.date2num(start_date))  # user-specified start date
    if end_date == mdates.num2date(1000000):  # if user hasn't specified an end date
        t2 = int(mdates.date2num(conversation.message_log[-1]["date"].replace(hour=0, minute=0))) + 1  # day after final day
    else:
        t2 = int(mdates.date2num(end_date))  # user-specified end date

    # To avoid a spikey graph, bin_size groups messages (default is by week, 7)
    if cumulative == True:
        bin_size = 1  # always use 1 for cumulative data (no risk of spikey data)

    time = [*range(t1, t2, bin_size)]  # create a list of time values

    # For larger bin sizes, the last few messages may have been sent half way
    # through a week, for example. Force the last date to be included here.
    # (The original compared num2date(max(time)) against the int t2, which is
    # always unequal; comparing the day numbers matches the stated intent.)
    if max(time) != t2:
        final_date = max(time) + bin_size
        time.append(final_date)

    # Create y axes for sent and received messages
    participants_message_tally = dict([(p, []) for p in conversation.participants])  # one list of y values per person
    cnt = Counter()  # counter to track messages sent by each person

    # Track which message we're on (index j)
    j = 0  # start on first message
    if start_date != mdates.num2date(730120):  # if user has specified a start date
        while conversation.message_log[j]["date"] < start_date:
            j += 1
    eoc = len(conversation.message_log)  # end of conversation

    # Tally up messages
    for i in range(0, len(time)):
        # Reset counter. Don't reset for cumulative data (e.g. stack plot)
        if cumulative == False:
            for person in cnt:
                cnt[person] = 0
        # Loop through messages sent on this date
        if j < eoc:  # make sure we haven't reached the last message (j > eoc would error)
            while (msgdate := mdates.date2num(conversation.message_log[j]["date"])) <= time[i] \
                    and time[i] <= mdates.date2num(end_date):
                if msgdate <= time[i]:
                    if group_messages:
                        # Group messages (treat bursts of individual messages as one)
                        c0 = j > 0  # ignore first message
                        c1 = conversation.message_log[j]["sender"] == conversation.message_log[j-1]["sender"]  # previous message sent by the same person
                        c2 = (conversation.message_log[j]["date"] - conversation.message_log[j-1]["date"]) < datetime.timedelta(seconds=30)  # previous message sent <30 seconds before this one
                        c_final = c0 * c1 * c2  # True if the participant has spammed lots of messages at once
                    else:
                        c_final = False  # if group_messages is off, count all messages
                    if not c_final:
                        cnt[conversation.message_log[j]["sender"]] += 1  # tally a message for this participant
                # Go to next message, or break if we've reached the end
                j += 1
                if j == eoc:
                    break
        # Add new y value for each person
        for person in conversation.participants:
            participants_message_tally[person].append(cnt[person])

    # x data (time) and y data per participant, as described in the docstring
    return time, participants_message_tally
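# Hedged usage sketch: `conversation` is assumed to expose .participants and a
# date-sorted .message_log of {"date", "sender"} dicts (both names taken from
# the function body above); the sample data here is made up.
import datetime
from collections import Counter
import matplotlib.dates as mdates

class FakeConversation:
    participants = ["Alice", "Bob"]
    message_log = [
        {"date": datetime.datetime(2021, 1, 1, 9, 0), "sender": "Alice"},
        {"date": datetime.datetime(2021, 1, 2, 18, 5), "sender": "Bob"},
        {"date": datetime.datetime(2021, 1, 8, 12, 30), "sender": "Alice"},
    ]

time_bins, tally = collate_data(FakeConversation(), bin_size=7)
print(time_bins, tally)  # weekly bins and per-person message counts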
def test_auto_date_locator_intmult():
    def _create_auto_date_locator(date1, date2):
        locator = mdates.AutoDateLocator(interval_multiples=True)
        locator.create_dummy_axis()
        locator.set_view_interval(mdates.date2num(date1),
                                  mdates.date2num(date2))
        return locator

    d1 = datetime.datetime(1997, 1, 1)
    results = (
        [datetime.timedelta(weeks=52 * 200),
         ['1980-01-01 00:00:00+00:00', '2000-01-01 00:00:00+00:00',
          '2020-01-01 00:00:00+00:00', '2040-01-01 00:00:00+00:00',
          '2060-01-01 00:00:00+00:00', '2080-01-01 00:00:00+00:00',
          '2100-01-01 00:00:00+00:00', '2120-01-01 00:00:00+00:00',
          '2140-01-01 00:00:00+00:00', '2160-01-01 00:00:00+00:00',
          '2180-01-01 00:00:00+00:00', '2200-01-01 00:00:00+00:00']
         ],
        [datetime.timedelta(weeks=52),
         ['1997-01-01 00:00:00+00:00', '1997-02-01 00:00:00+00:00',
          '1997-03-01 00:00:00+00:00', '1997-04-01 00:00:00+00:00',
          '1997-05-01 00:00:00+00:00', '1997-06-01 00:00:00+00:00',
          '1997-07-01 00:00:00+00:00', '1997-08-01 00:00:00+00:00',
          '1997-09-01 00:00:00+00:00', '1997-10-01 00:00:00+00:00',
          '1997-11-01 00:00:00+00:00', '1997-12-01 00:00:00+00:00']
         ],
        [datetime.timedelta(days=141),
         ['1997-01-01 00:00:00+00:00', '1997-01-22 00:00:00+00:00',
          '1997-02-01 00:00:00+00:00', '1997-02-22 00:00:00+00:00',
          '1997-03-01 00:00:00+00:00', '1997-03-22 00:00:00+00:00',
          '1997-04-01 00:00:00+00:00', '1997-04-22 00:00:00+00:00',
          '1997-05-01 00:00:00+00:00', '1997-05-22 00:00:00+00:00']
         ],
        [datetime.timedelta(days=40),
         ['1997-01-01 00:00:00+00:00', '1997-01-08 00:00:00+00:00',
          '1997-01-15 00:00:00+00:00', '1997-01-22 00:00:00+00:00',
          '1997-01-29 00:00:00+00:00', '1997-02-01 00:00:00+00:00',
          '1997-02-08 00:00:00+00:00']
         ],
        [datetime.timedelta(hours=40),
         ['1997-01-01 00:00:00+00:00', '1997-01-01 04:00:00+00:00',
          '1997-01-01 08:00:00+00:00', '1997-01-01 12:00:00+00:00',
          '1997-01-01 16:00:00+00:00', '1997-01-01 20:00:00+00:00',
          '1997-01-02 00:00:00+00:00', '1997-01-02 04:00:00+00:00',
          '1997-01-02 08:00:00+00:00', '1997-01-02 12:00:00+00:00',
          '1997-01-02 16:00:00+00:00']
         ],
        [datetime.timedelta(minutes=20),
         ['1997-01-01 00:00:00+00:00', '1997-01-01 00:05:00+00:00',
          '1997-01-01 00:10:00+00:00', '1997-01-01 00:15:00+00:00',
          '1997-01-01 00:20:00+00:00']
         ],
        [datetime.timedelta(seconds=40),
         ['1997-01-01 00:00:00+00:00', '1997-01-01 00:00:05+00:00',
          '1997-01-01 00:00:10+00:00', '1997-01-01 00:00:15+00:00',
          '1997-01-01 00:00:20+00:00', '1997-01-01 00:00:25+00:00',
          '1997-01-01 00:00:30+00:00', '1997-01-01 00:00:35+00:00',
          '1997-01-01 00:00:40+00:00']
         ],
        [datetime.timedelta(microseconds=1500),
         ['1996-12-31 23:59:59.999507+00:00', '1997-01-01 00:00:00+00:00',
          '1997-01-01 00:00:00.000502+00:00',
          '1997-01-01 00:00:00.001005+00:00',
          '1997-01-01 00:00:00.001508+00:00']
         ],
    )

    for t_delta, expected in results:
        d2 = d1 + t_delta
        locator = _create_auto_date_locator(d1, d2)
        assert list(map(str, mdates.num2date(locator()))) == expected
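# Hedged companion sketch: the same interval_multiples behaviour exercised on
# a real axis instead of a dummy one; the dates and figure are illustrative.
import datetime
import matplotlib.pyplot as plt
import matplotlib.dates as mdates

fig, ax = plt.subplots()
ax.plot([datetime.datetime(1997, 1, 1), datetime.datetime(1997, 2, 10)], [0, 1])
locator = mdates.AutoDateLocator(interval_multiples=True)
ax.xaxis.set_major_locator(locator)
ax.xaxis.set_major_formatter(mdates.AutoDateFormatter(locator))
fig.autofmt_xdate()  # rotate tick labels so day-level ticks don't overlap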
import datetime as dt  # `dt` is used throughout below; the import was missing

import matplotlib.pyplot as plt
import matplotlib.dates as mdate

from chandratime import convert_chandra_time, convert_to_doy

# Use today's date, plus 2 days
end_date = dt.date.today() + dt.timedelta(days=2)

sunday_pass = dt.datetime(2020, 8, 24, 2, 30)
sunday_pass_end = dt.datetime(2020, 8, 24, 3, 27, 34)

oneweek_pre_anomaly = dt.datetime(2020, 8, 18, 0)
oneday_pre_anomaly = dt.datetime(2020, 8, 23, 0)

eventdate = mdate.num2date(convert_chandra_time([714627954.9676153660]))

fa6_heater_poweroff = dt.datetime(2020, 8, 24, 14, 38)
hrc_poweroff_date = dt.datetime(2020, 8, 24, 15, 7, 26)

morning_pass_time = dt.datetime(2020, 8, 24, 13, 45)
evening_pass_time = dt.datetime(2020, 8, 24, 21, 20)

tuesday_community_brief = dt.datetime(2020, 8, 25, 13, 0)
wednesday_community_brief = dt.datetime(2020, 8, 26, 13, 0)

cap_step_2 = dt.datetime(2020, 8, 27, 0, 13)
cap_step_5 = dt.datetime(2020, 8, 27, 0, 24)
cap_step_8 = dt.datetime(2020, 8, 27, 0, 40)

# The famous 6am pass in which everything looked fine
thursday_early_pass = dt.datetime(2020, 8, 27, 10, 0)
thursday_early_pass_end = dt.datetime(2020, 8, 27, 11, 0)
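# Hedged sketch: marking a few of the timeline constants above on a date axis.
# Purely illustrative; it assumes the axes would normally show telemetry over
# this period rather than an empty plot.
fig, ax = plt.subplots()
for when, label in [(oneweek_pre_anomaly, 'T-7d'),
                    (hrc_poweroff_date, 'HRC power-off'),
                    (tuesday_community_brief, 'community brief')]:
    ax.axvline(when, linestyle='--', alpha=0.6)
    ax.annotate(label, xy=(when, 0.95), xycoords=('data', 'axes fraction'),
                rotation=90, va='top', fontsize=8)
ax.set_xlim(oneweek_pre_anomaly, end_date)
fig.autofmt_xdate()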