def plot_delay(self, file_name, num_channels=16, window=64, start_frame=10):
    """Plot Goertzel filter output versus delay-clock offset for each channel.

    Reads a ``wave_*`` capture, slides a ``window``-sample Goertzel filter
    (bin 2) across 52 successive one-clock offsets anchored at frame
    ``start_frame``, and plots one curve per ADC channel.  It then reads the
    companion ``process_*`` file and prints row 10 of its moments scaled by
    the 64 MHz clock period.

    Args:
        file_name: path to a ``wave_*`` data file.
        num_channels: number of ADC channels in the capture (default 16,
            matching the original hard-coded value).
        window: Goertzel window length in samples (default 64).
        start_frame: index of the frame whose start anchors the sliding
            window (default 10).
    """
    vol = decode_wave.read_wave_file(file_name)
    num_range = 52
    x = range(num_range)
    base = window * start_frame  # sample index where the sweep starts
    goertzel_out = [np.zeros(num_range) for _ in range(num_channels)]
    for i in range(num_range):
        for ch in range(num_channels):
            # Goertzel at bin 2 over `window` samples shifted by `i` clocks;
            # the sample just before the window seeds the filter state.
            goertzel_out[ch][i] = goertzel.goertzel(
                vol[ch][base + i:base + window + i], 2,
                vol[ch][base + i - 1])
    print(goertzel_out[0])

    plt.figure(figsize=(15, 9))
    for ch in range(num_channels):
        plt.plot(x, goertzel_out[ch], linewidth=0.5, marker='v',
                 markersize=1, label=str(ch))
    plt.xlim(0, num_range)
    plt.title(os.path.split(file_name)[1])
    plt.ylabel('ADC count')
    plt.xlabel('Delay clk')
    plt.grid()

    # The processed-moments file sits next to the wave file with a
    # `process_` prefix instead of `wave_`.
    file_name = file_name.replace('/wave_', '/process_')
    mom = decode_process.read_process_file(file_name)
    constant = 1 / 64e6  # seconds per tick of the 64 MHz clock
    for ch in range(num_channels):
        print(mom[10][ch] * constant)
    plt.show()
"""Minimal matplotlib example: plot y = x**2 for x in 1..3."""
import matplotlib.pyplot as plt  # fixed: module was misspelled `matplotib`

x = [1, 2, 3]
y = [1, 4, 9]

plt.plot(x, y)
plt.xlabel("X Axis")
plt.ylabel("Y Axis")
plt.title("My python first graph")
plt.show()
# Parse heterogeneous date strings into datetimes.
ser = pd.Series(['01 Jan 2010', '02-02-2011', '20120303', '2013/04/04',
                 '2014-05-05', '2015-06-06T12:20'])
from dateutil.parser import parse
ser.map(lambda x: parse(x))
pd.to_datetime(ser)

# %% A bit of everything
iris.SepalLength.rolling(2).sum()

# %% Impute data
# fixed: original was `first()interpolate(...)` — missing the `.`,
# which is a SyntaxError.
population.resample('A').first().interpolate('linear')  # impute yearly values linearly

# %% MATPLOTLIB
import matplotlib.pyplot as plt  # fixed: module was misspelled `matplotib`

plt.plot(df['Mes'], df['data science'], label='data science')
plt.plot(df['Mes'], df['machine learning'], label='machine learning')
plt.plot(df['Mes'], df['deep learning'], label='deep learning')
plt.xlabel('Date')
plt.ylabel('Popularity')
plt.title('Popularity of AI terms by date')
plt.grid(True)
plt.legend()
# Coordinates use the same units as the graph
plt.text(x='2010-01-01', y=80, s=r'$\lambda=1, r^2=0.8$')
plt.annotate('Notice something?', xy=('2014-01-01', 30),
             xytext=('2006-01-01', 50),
             arrowprops={'facecolor': 'red', 'shrink': 0.05})

fig, axes = plt.subplots(2, 2)
axes[0, 0].hist(df['data science'])
axes[0, 1].scatter(df['Mes'], df['data science'])
axes[1, 0].plot(df['Mes'], df['machine learning'])
axes[1, 1].plot(df['Mes'], df['deep learning'])
import numpy as np

# Payoff diagram for a long vs. short call option around strike K.
K = 9100
Premium_Call = 179
Premium_Put = 185  # NOTE(review): unused here — presumably for a put diagram; confirm
Interval = 500

# Terminal underlying prices spanning +/- Interval around the strike.
ST = np.arange(K - Interval, K + Interval)
# Long call: intrinsic value max(ST - K, 0) minus the premium paid.
Payoff_LongCall = np.maximum(ST - K, 0) - Premium_Call
# Short call is the mirror image of the long position.
Payoff_ShortCall = -Payoff_LongCall

import matplotlib.pyplot as plt  # fixed: module was misspelled `matplotib`

plt.plot(ST, Payoff_LongCall)
plt.plot(ST, Payoff_ShortCall)
plt.show()