def __init__(self,drift_object,times_to_combine_str,save_plots=True):
        """
            Creates a drift_diagnostic object to load wavecal soln data into memory. Sets up outpath.
        """
        self.drift = drift_object
        self.times_to_combine = [[strpdate2num("%Y%m%d-%H%M%S")(tstamp1),strpdate2num("%Y%m%d-%H%M%S")(tstamp2)] for [tstamp1,tstamp2] in times_to_combine_str]

        
        self.drift.mastercalFNs, self.params = getCalFileNames(self.drift.params,'mastercal_','_drift')
        self.save_plots=save_plots

        intermDir=self.params['intermDir']
        outdir=self.params['outdir']
        if intermDir is None or intermDir == '':
            intermDir = os.getenv('MKID_PROC_PATH', default="/Scratch")
        if outdir is None or outdir == '':
            outdir = '/waveCalSolnFiles'
        self.outpath=intermDir+outdir+os.sep+self.drift.driftFNs[0].run+os.sep+'master_cals'
        try:
            os.mkdir(self.outpath)
            os.mkdir(self.outpath+'/drift_study')
            if self.save_plots:
                os.mkdir(self.outpath+'/figs')
        except OSError:
            # directories may already exist
            pass
Example #2
def regis_plot():
    fig,ax=plt.subplots()
    for item in ([ax.xaxis.label, ax.yaxis.label] + ax.get_xticklabels() + ax.get_yticklabels()):
        item.set_fontsize(30)
    days1, impressions = numpy.loadtxt("tempcsharp/owl.csv", unpack=True,
                                      converters={ 0: mdates.strpdate2num('%Y-%m-%d')})
    days_s1, impressions_t = numpy.loadtxt("tempcsharp/owl.csv", unpack=True,dtype='str')

    impressions_log1 = []
    print len(impressions)
    for i in range(len(impressions)):
        impressions_log1.append(sum(impressions[0:i]))
    ax.plot_date(x=days1, y=impressions_log1, fmt="r-", label='owls', color='blue', lw=2)



    days2, impressions = numpy.loadtxt("tempcsharp/sparrow.csv", unpack=True,
                                    converters={ 0: mdates.strpdate2num('%Y-%m-%d')})
    days_s2, impressions_t = numpy.loadtxt("tempcsharp/sparrow.csv", unpack=True,dtype='str')
    impressions_log2 = []
    print len(impressions)
    for i in range(len(impressions)):
        impressions_log2.append(sum(impressions[0:i]))
    ax.plot_date(x=days2, y=impressions_log2, fmt="r-", label='sparrows', color='red', lw=2)



    days3, impressions = numpy.loadtxt("tempcsharp/all.csv", unpack=True,
                                  converters={ 0: mdates.strpdate2num('%Y-%m-%d')})
    days_s3, impressions_t = numpy.loadtxt("tempcsharp/all.csv", unpack=True,dtype='str')
    impressions_log3 = []
    print len(impressions)
    for i in range(len(impressions)):
        impressions_log3.append(sum(impressions[0:i]))
    ax.plot_date(x=days3, y=impressions_log3, fmt="r-", label='all', color='green', lw=2)


    ax.set_xlabel("Registration time")
    ax.set_ylabel("#Users")

    days1 = list(days1)
    days2 = list(days2)
    days3 = list(days3)
    output = open("datas/register.csv", 'w')
    for i in range(len(days1)):
        if days1[i] in days2 and days1[i] in days3:
            j = days2.index(days1[i])
            k = days3.index(days1[i])
            output.write(days_s1[i]+", "+str(impressions_log1[i])+", "+str(impressions_log2[j])+", "+str(impressions_log3[k])+"\n")
    output.close()


    #plt.ylim([0,5])
    plt.legend(prop={'size':30}, loc = 2)
    plt.grid(True)
    plt.show()
    sys.exit(1)
Example #3
File: odv.py Project: regeirk/atlantis
    def read(self, url=None, fields=None, delimiter="\t"):
        """
        Reads data from ODV formatted file.

        Parameters
        ----------
        url : string, optional
            Full path and file name of the data to read. If omitted,
            assumes path indicated at sequence initialization.
        fields : sequence, optional
            Sets the fields to be read. Default is to read all fields
            in the dataset.

        Returns
        -------
        dat : array like
            Structured array of file contents.

        """
        if fields is not None:
            raise ValueError("Not implemented yet.")
        # Reads the content of file.
        if url is None:
            url = self.url
        f = self._read_text_file(url)
        # Splits all content lines and analyzes the header to extract
        # additional information
        header, fields, skip_header = self._get_header(f, comments="//", delimiter=delimiter)
        keys = fields.keys()
        # Sets data converters according to field names.
        converters = dict()
        for i, key in enumerate(keys):
            if key == "YYYY-MM-DD":
                converters[i] = strpdate2num("%Y-%m-%d")
            elif key == "hh:mm":
                converters[i] = strpdate2num("%H:%M")
        # Converts data content in structured array using numpy.genfromtxt.
        dat_keys = [b"{:d}".format(a) for a in range(len(keys))]
        dat = genfromtxt(
            url, delimiter=delimiter, skip_header=skip_header + 1, dtype=None, names=dat_keys, converters=converters
        )
        # Sets data in field container.
        for dat_key, key in zip(dat_keys, keys):
            fields[key].data = dat[dat_key]
        # Updates class containers
        self.fields = fields
        # Update date and time.
        T0 = 693596.0  # strpdate2num('%H:%M')('00:00')
        self.time = fields["YYYY-MM-DD"].data + fields["hh:mm"].data - T0
        # Returns data structured array.
        return dat
Example #4
def show_plot(times, values, np_test, name):
    x_val, y_val = np.loadtxt(
        np_test,
        delimiter=',',
        unpack=True,
        converters={0: mdates.strpdate2num('%Y-%m-%d %H:%M:%S.%f')})

    # x_val = times
    # y_val = values
    # plt.hold(False)
    plt.title(name)
    plt.xlabel('Time')
    plt.ylabel('Values')
    plt.plot_date(x=x_val,
                  y=y_val,
                  marker='o',
                  markerfacecolor='red',
                  fmt='b-',
                  label='value',
                  linewidth=2)
    # plt.plot(x_val, y_val)
    # plt.plot(x_val, y_val, 'or')
    plt.savefig(os.path.join(MEDIA_FOLDER, 'plots', '%s.png' % name))
    plt.clf()
    plt.cla()
Example #5
def test11():
    date = []; closep = []
    with open('data/twtr-10y.csv') as csvfile:
        reader = csv.DictReader(csvfile)
        dateconv = strpdate2num('%Y/%m/%d')
        for row in reader:
            date.append(dateconv(row['date']))
            closep.append(float(row['close']))
    date = np.array(date)
    closep = np.array(closep)


    ipo = closep[-1]

    fig = plt.figure()
    ax1 = plt.subplot2grid((1,1),(0,0))

    ax1.plot_date(date, closep, '-', label='price')
    ax1.plot([],[], color='g', alpha=0.5, linewidth=3, label='gain')
    ax1.plot([],[], color='r', alpha=0.5, linewidth=3, label='loss')

    ax1.fill_between(date, closep, ipo, where=closep>ipo, facecolor='g', alpha=0.5)
    ax1.fill_between(date, closep, ipo, where=closep<ipo, facecolor='r', alpha=0.5)

    for label in ax1.xaxis.get_ticklabels():
        label.set_rotation(45)
    ax1.xaxis.label.set_color('c')
    ax1.yaxis.label.set_color('r')
    ax1.grid(True)

    plt.subplots_adjust(bottom=0.20)
Example #6
def readPingJSON(file):
    at_raw = json.load(open(file, 'r'))
    at_prob_rtt = {}
    for mes in at_raw:
        prob_id = mes['prb_id']
        if prob_id not in at_prob_rtt:
            at_prob_rtt[prob_id] = {'src_ip': mes['from'],
                                    'time_md': [],
                                    'avg': [],
                                    'min':[],
                                    'max':[],
                                    'loss':[],
                                    'time_epc':[]}
        epoch_time = mes['timestamp']
        at_prob_rtt[prob_id]['time_epc'].append(epoch_time)
        utc_string = time.strftime('%Y-%m-%d %H:%M:%S',time.gmtime(epoch_time))
        mdate_time = mdates.strpdate2num('%Y-%m-%d %H:%M:%S')(utc_string)
        at_prob_rtt[prob_id]['time_md'].append(mdate_time)
        at_prob_rtt[prob_id]['min'].append(float(round(mes['min'])))
        at_prob_rtt[prob_id]['avg'].append(float(round(mes['avg'])))
        at_prob_rtt[prob_id]['max'].append(float(round(mes['max'])))
        if mes['sent'] == 0:
            at_prob_rtt[prob_id]['loss'].append(100)
        else:
            at_prob_rtt[prob_id]['loss'].append((1-float(mes['rcvd'])/mes['sent'])*100)
    return at_prob_rtt
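# A minimal alternative sketch (added note, not part of the original example): the
# strftime / strpdate2num round trip above can be avoided by converting the epoch
# timestamp into a matplotlib date number directly. The helper name epoch_to_mdate is
# hypothetical; it assumes the same matplotlib.dates-as-mdates import used above.
import datetime

def epoch_to_mdate(epoch_time):
    # utcfromtimestamp yields a naive UTC datetime; date2num converts it into the same
    # float day-number representation that strpdate2num('%Y-%m-%d %H:%M:%S') returns
    # for the UTC-formatted string.
    return mdates.date2num(datetime.datetime.utcfromtimestamp(epoch_time))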
def run(infile, k, max_error=float('inf')):
    # extract features and segmented data from CSV file
    dates, data = pp.csv_import(infile)
    segd1 = sgt.bottom_up(data,k,calc_error=sgt.sqr_residual)
    segd2 = sgt.bottom_up(data,k,calc_error=sgt.relative_sqr_residual)

    # output some statistics
    print 'original data points: %d' % len(data)
    print 'square residual data points: %d' % len(segd1)
    print 'rel. square res data points: %d' % len(segd2)

    # convert dates to matplotlib.dates
    dates = map(mdates.strpdate2num('%Y-%m-%d'),dates)

    # plot segmented time series versus original
    fig, (orig_ts, seg1_ts, seg2_ts) = subplots(3, sharex=True)
    fig.set_size_inches(8,10)

    orig_ts.plot_date(dates,data,'b-')
    orig_ts.set_title('original data')
    seg_inds1, seg_vals1 = zip(*segd1)
    seg_dates1 = [dates[i] for i in seg_inds1]
    seg_inds2, seg_vals2 = zip(*segd2)
    seg_dates2 = [dates[i] for i in seg_inds2]
    seg1_ts.plot_date(seg_dates1, seg_vals1, 'r-')
    seg1_ts.set_title('abs. residual error segmenting')
    seg2_ts.plot_date(seg_dates2, seg_vals2, 'g-')
    seg2_ts.set_title('rel. residual error segmenting')

    # auto space the dates x-ticks
    fig.autofmt_xdate()
    show()
Example #8
File: test3.py Project: wuzhiyi/wzy-quant
def graphData(stock):
    try:
        stockFile = stock+'.txt'

        date, closep, highp, lowp, openp, volume = np.loadtxt(stockFile,delimiter=',',unpack=True,
                                                         converters={0: mdates.strpdate2num('%Y%m%d')})


        fig = plt.figure()
        ax1 = plt.subplot(1,1,1)
        ax1.plot(date, openp)
        ax1.plot(date, highp)
        ax1.plot(date, lowp)
        ax1.plot(date, closep)

        ax1.xaxis.set_major_locator(mticker.MaxNLocator(10))
        ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))

        for label in ax1.xaxis.get_ticklabels():
            label.set_rotation(45)


        plt.show()

    except Exception,e:
        print 'failed main loop',str(e)
Example #9
def bytespdate2num(fmt, encoding='utf-8'):
    strconverter = mdates.strpdate2num(fmt)

    def bytesconverter(b):
        s = b.decode(encoding)
        return strconverter(s)
    return bytesconverter
def convert_date(date_format, encoding='utf-8'):
    string_converter = mdates.strpdate2num(date_format)

    def bytes_converter(b):
        s = b.decode(encoding)
        return string_converter(s)
    return bytes_converter
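# Usage sketch (added, not from the projects above): on Python 3, numpy.loadtxt hands raw
# bytes to converter callables, which is why the wrappers above decode before calling
# strpdate2num. The file name 'prices.csv' and its two-column layout (ISO date, closing
# price) are assumptions for illustration only.
import numpy as np
import matplotlib.pyplot as plt

date, close = np.loadtxt('prices.csv', delimiter=',', unpack=True,
                         converters={0: bytespdate2num('%Y-%m-%d')})
plt.plot_date(date, close, '-')  # 'date' already holds matplotlib date numbers
plt.show()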
Example #11
    def request_market_data(self, timeframe, interval, symbol, sectype, \
                                        exchange, currency=None, expiry=None, \
                                        primexch=None, latestdate=None):
        # Establish a connection
        sys.stdout.write("\nCalling connection\n")
        connection = ibConnection()
        connection.register(self.ibhndl.my_callback_handler, \
                                                        message.historicalData)
        connection.connect()
 
        #Contract
        contract = Contract()
        contract.m_symbol = symbol
        contract.m_secType = sectype
        contract.m_exchange = exchange
        contract.m_currency = currency
 
        if primexch:
            contract.m_primaryExch = primexch
 
        if expiry:
            contract.m_expiry = expiry
 
        # Get historical data
        rtnData = self.ibhndl.reqHistoricalData(contract, interval, connection,\
                                                        timeframe, latestdate)
        connection.disconnect()
 
        if not rtnData[0]:
            sys.stderr.write("ERROR: No data return for %s : %s\n" % (symbol,\
                                                                    interval)) 
            return rtnData, ""

        dateList = list()
        stockFile = list()
        for data, volume in zip(rtnData[0], rtnData[1]):
            dateList = dateList + [data[0]]
            dataStr = '%s, %s, %s, %s, %s, %s' % \
                        (strftime("%Y-%m-%d %H:%M:%S", \
                              localtime(int(str(data[0]))/1000)), data[1], \
                              data[2], data[3], data[4], str(volume[1]))
        
            stockFile = stockFile + [dataStr]
        
        convertStr = '%Y-%m-%d %H:%M:%S'
        date, _, _, _, closep, volume = \
                        np.loadtxt(stockFile, delimiter=',', unpack=True, \
                       converters={0:mdates.strpdate2num(convertStr)})
        
        #PATTERNS
        retpat = []
        try:
            patterndb = PatternDB()
            patterndb.add(HS())
            patterndb.add(IHS())
        
            retpat = patterndb.check(closep[-60:], date[-60:])
        except Exception, excp:
            sys.stderr.write("ERROR: PATTERNS failed with exception " \
                                                            "%s\n" % excp)
def graphRawFX():
    date,bid,ask = np.loadtxt('GBPUSD1d.txt', unpack=True,
                              delimiter=',',
                              converters={0:mdates.strpdate2num('%Y%m%d%H%M%S')})

    fig=plt.figure(figsize=(10,7))

    ax1 = plt.subplot2grid((40,40), (0,0), rowspan=40, colspan=40)
    ax1.plot(date,bid)
    ax1.plot(date,ask)
    plt.gca().get_yaxis().get_major_formatter().set_useOffset(False)

    ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d %H:%M:%S'))
    #####
    plt.grid(True)
    for label in ax1.xaxis.get_ticklabels():
            label.set_rotation(45)

    #######
    ax1_2 = ax1.twinx()
    
    #ax1_2.plot(date, (ask-bid))
    
    ax1_2.fill_between(date, 0, (ask-bid), facecolor='g',alpha=.3)
    
    #ax1_2.set_ylim(0, 3*ask.max())
    #######
    
    plt.subplots_adjust(bottom=.23)
    #plt.grid(True)
    
    plt.show()
Example #13
def graph(csv_file, filename):
    '''Create a line graph from a two column csv file.'''
    unit = configs['unit']
    date, value = np.loadtxt(csv_file, delimiter=',', unpack=True,
                             converters={0: mdates.strpdate2num('%H:%M:%S')}
                             )
    fig = plt.figure(figsize=(10, 3.5))
    fig.add_subplot(111, axisbg='white', frameon=False)
    rcParams.update({'font.size': 9})
    plt.plot_date(x=date, y=value, ls='solid', linewidth=2, color='#FB921D',
                  fmt=':'
                  )
    title = "Sump Pit Water Level {}".format(time.strftime('%Y-%m-%d %H:%M'))
    title_set = plt.title(title)
    title_set.set_y(1.09)
    plt.subplots_adjust(top=0.86)

    if unit == 'imperial':
        plt.ylabel('inches')
    if unit == 'metric':
        plt.ylabel('centimeters')
    
    plt.xlabel('Time of Day')
    plt.xticks(rotation=30)
    plt.grid(True, color='#ECE5DE', linestyle='solid')
    plt.tick_params(axis='x', bottom='off', top='off')
    plt.tick_params(axis='y', left='off', right='off')
    plt.savefig(filename, dpi=72)
    def populate_master_times_data(self):
        if not hasattr(self, 'masterFNs'):
            self.masterFNs,p = getCalFileNames(self.params,'mastercal_','_drift.h5')
        if len(self.masterFNs)==0:
            print "No master cal files found!"
            return

        print "Collecting master wvlcal start and end times for "+str(self.driftFNs[0].run)+'...'
        self.master_start_time = np.zeros(len(self.masterFNs))
        self.master_end_time = np.zeros(len(self.masterFNs))
        for i in range(len(self.masterFNs)):
            try:
                driftFile=tables.openFile(self.masterFNs[i].mastercalDriftInfo(),mode='r')
                drift_times = driftFile.root.params_drift.drifttimes.cols.wavecal[:]
                drift_num_times = [strpdate2num("%Y%m%d-%H%M%S")(fname.split('_')[1].split('.')[0]) for fname in drift_times]
                #print drift_times
                self.master_start_time[i] = np.amin(drift_num_times)
                self.master_end_time[i] = np.amax(drift_num_times)
                driftFile.close()
            except:
                print '\tUnable to open: '+self.masterFNs[i].mastercalDriftInfo()

        #print self.master_start_time
        #print self.master_end_time
        print "\tDone."
Example #15
	def comp_s(self,stock_name):
		"""
		Update the Stock information from the User Input
		"""
		self.str_msg_var.set(get_info(stock_name))
		self.stock_name_var.set(stock_name)

		start_d = self.start_date_entry.selection
		end_d = self.end_date_entry.selection
		if start_d == None:
			start_d = calendar.datetime.datetime(2014,1,1)
		if end_d == None:
			end_d = calendar.datetime.datetime.now()

		# print(start_d,end_d)

		s = get_history(self.stock_name_var.get(), start_d.year, start_d.month, start_d.day, end_d.year, end_d.month, end_d.day)
		fname = "newdoc.csv"
		f = open(fname,"w+")
		f.write(s)
		f.close()
		self.data = extract_data(fname)


		figure1 = Figure(figsize=(6,4),dpi=100)
		figure1a = figure1.add_subplot(111)
		test_x = arange(0.0,3.0,0.01)
		test_y = sin(2*pi*test_x)
		x = list(map(mdates.strpdate2num("%m/%d/%Y"),map(lambda x: x[0],self.data[1:])))
		y = list(map(lambda x: x[-1],self.data[1:]))
		x = x[::-1]
		y = y[::-1]
		# print(x)
		call(["python2", "orangehelper.py"])
		# [reg,tpl] = analysetrend(fname,"trend.csv",0.7)
		# print(reg)
		# print(tpl)
		# tpl = tpl[::-1]
		tpl2 = []
		xtpl = []
		tpfn = open("orangetrend.txt")
		for each in tpfn:
			if each == "":
				continue
			line = each.strip().split(",")
			tpl2.append(float(line[-1]))
			xtpl.append(float(line[0]))


		figure1a.plot_date(x,y,"b-")
		figure1a.plot_date(xtpl,tpl2,"r-")

		figure1.autofmt_xdate(bottom=0.2, rotation=30, ha='right')
		dataPlot = FigureCanvasTkAgg(figure1, master=self.frame)
		dataPlot.show()
		dataPlot.get_tk_widget().grid(row=1,column=0,columnspan=2,sticky=W+N+S)
		

		self.suggestion()
Example #16
def test08():
    url = 'http://www.quandl.com/api/v1/datasets/EUROSTAT/CRIM_PLCE_42.csv'
    # https://theodi.org/blog/how-to-use-r-to-access-data-on-the-web
    f = urllib2.urlopen(url)
    s = f.read().strip().split('\n')[1:]
    f.close()
    date, value = np.loadtxt(s, delimiter=',', unpack=True, converters={0:strpdate2num('%Y-%m-%d')})
    plt.plot_date(date, value, '-', label=myname())
Example #17
File: graph29.py Project: Mausy5043/domog
def bytespdate2num(fmt, encoding='utf-8'):
    # convert datestring to proper format for numpy.loadtxt()
    strconverter = strpdate2num(fmt)

    def bytesconverter(b):
        s = b.decode(encoding)
        return strconverter(s)
    return bytesconverter
Example #18
def graphData(stock):
    try:
        stockFile = 'dataDaily/'+stock+'.txt'

        date,closep,highp,lowp,openp,volume = np.loadtxt(stockFile,delimiter=',',unpack=True,
                                                         converters={0:mdates.strpdate2num('%Y%m%d')})

        fig = plt.figure()
        
        # subplot 1: price
        #ax1 = plt.subplot(2,1,1)#(2,3,6), 2X3, and at place 6
        ax1 = plt.subplot2grid((5,4),(0,0),rowspan=4,colspan=4)
        
        ax1.plot(date,openp)
        ax1.plot(date,highp)
        ax1.plot(date,lowp)
        ax1.plot(date,closep)
        # label
        #plt.xlabel('Date')
        plt.ylabel('Price ($)')
        plt.title(stock)

        # set tick label
        plt.setp(ax1.get_xticklabels(),visible=False)

        #grid
        ax1.grid(True)
        
        ax1.xaxis.set_major_locator(mticker.MaxNLocator(10))
        #ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))

        for label in ax1.xaxis.get_ticklabels():
            label.set_rotation(45)

        # subplot 2: volume
        #ax2 = plt.subplot(2,1,2,sharex=ax1) #share axis, sync the axis
        ax2 = plt.subplot2grid((5,4),(4,0),sharex=ax1,rowspan=1,colspan=4)
        # remove y axis tick label for subplot2
        ax2.axes.yaxis.set_ticklabels([])
        
        ax2.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
        
        ax2.bar(date,volume)
        ax2.grid(True)
        plt.xlabel('Date')
        plt.ylabel('Volume')

        for label in ax2.xaxis.get_ticklabels():
            label.set_rotation(45)

        #adjust plot spaces
        plt.subplots_adjust(left=.09,bottom=.17,right=.93,top=.93,wspace=.20,hspace=.00)
        
        plt.show()
        fig.savefig('example.png')

    except Exception, e:
        print 'failed main loop',str(e)
Example #19
def interp_from_file(in_file_path, dates_out, date_string_format='%Y-%m-%d %H:%M',
                     csv_delimiter=',', skip_header=1, quotechar='\"'):
    """
    Performs linear interpolation from an input csv file with lin_time_interp

    Parameters
    ----------
    in_file_path : input csv file following the format
        TIMESTAMP,val
        2012-01-01 12:00,0.0
        2012-02-01 13:00,0.0

        A header line must be present, but column names may be chosen freely.
        The first column should be the date in string format.
        The second column should contain the numeric values to interpolate.

    dates_out : list of datetime objects, as generated by gen_time_seq(...).

    date_string_format : date string format used in input file, 
                         see https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior

    csv_delimiter, skip_header, quotechar : parameters for numpy.genfromtxt(...)
    
    Returns
    -------

    vals_out : a numpy array with interpolated values at t_out

    Examples
    --------
    >>> dates_out = gen_time_seq(date_start_string = '2015-03-02 12:00' ,tstep = 60 ,n_tstep = 120)
    >>> vals_out = interp_from_file('./data_in.dat', dates_out  )

    """

    # read observed data

    try:
        datenums_in, vals_in = np.genfromtxt(in_file_path, delimiter=csv_delimiter,
                                             unpack=True, skip_header=skip_header,
                                             converters={0: mdates.strpdate2num(date_string_format)})
    except:
        print('Error while reading file ' + in_file_path + '.\n' +
              'Check file path and format (should be a 2 column CSV file).')
        return(None)

    # convert datesnums to datetime
    dates_in = mdates.num2date(datenums_in)

    # compute vals_out 
    vals_out = lin_time_interp(dates_in, vals_in, dates_out)

    return( vals_out ) 
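# lin_time_interp is not shown in this snippet; below is a minimal sketch of what such a
# helper could look like (hypothetical implementation, assuming the same np/mdates imports
# as above, dates_in/dates_out as lists of datetime objects sorted in increasing order,
# and vals_in as a 1-D array).
def lin_time_interp(dates_in, vals_in, dates_out):
    # work on matplotlib date numbers so numpy.interp sees monotonically increasing x values
    x_in = mdates.date2num(dates_in)
    x_out = mdates.date2num(dates_out)
    return np.interp(x_out, x_in, vals_in)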
def plot_ts_data(inputfilepath):
    days,  time = np.loadtxt(inputfilepath, delimiter = ',', unpack = True, skiprows = 3,
                             converters = {0: mdates.strpdate2num('"%m-%d"')})
    plt.plot_date(x = days, y = time, fmt = 'r-')
    plt.title("Monthly Sales of Plastic Manufacturer's Product-A")
    plt.xlabel('Date-Time')
    plt.ylabel('Sales Qty')
    plt.grid(True)
    plt.show()
Example #21
def plot_ac(type):
    days, impressions = numpy.loadtxt("tempcsharp/"+type+".csv", unpack=True,
                                   converters={ 0: mdates.strpdate2num('%Y-%m-%d')})

    plt.plot_date(x=days, y=impressions, fmt="r-")
    plt.xlabel("time")
    plt.ylabel("#users")
    #plt.ylim([0,5])
    plt.grid(True)
    plt.show()
Example #22
    def array_builder(self):
        for i in range(len(self.CsvFile.data_set)):
            key = "{0:s}:{1:s}".format(self.CsvFile.data_set[i][1], self.CsvFile.data_set[i][2])
            data_range = mdates.strpdate2num(self.data_array)(self.CsvFile.data_set[i][6])
            data_value = self.CsvFile.data_set[i][7]
            self.csv_array.setdefault(key, [[], []])
            self.csv_array[key][0].append(data_range)
            self.csv_array[key][1].append(data_value)

        return self.csv_array
Example #23
File: plot.py Project: arsh0r/heatmeter
def read_datafile(file_name):
   with lzma.open(file_name, "r") as f:
      for line in f:
         if line.startswith('ts,'):
            names=line.rstrip().split(',')
            break
      data = np.genfromtxt(f, delimiter=',', comments='#', skiprows=2, names=names,
                           usecols=(range(0,7)),
                           converters={0: mdates.strpdate2num('%Y-%m-%d %H:%M:%S')})
   return data
Example #24
 def display_price(self):
     sorted_dates = sorted(self.profile.keys())
     plt.plot_date([mdates.strpdate2num('%Y-%m-%d')(day) for day in sorted_dates],
                   self.asset_closing_prices(),
                   fmt="k-o")
     plt.title(self.ticker + " Closing Prices")
     plt.ylabel("Daily Prices")
     plt.xlabel("Historical Dates")
     plt.grid(True)
     plt.show()
Example #25
def main():
    #fname = sys.argv[1]
    hours, revenues = np.loadtxt("all.revenue",
                                 delimiter='\t',
                                 unpack=True,
                                 converters={0: mdates.strpdate2num('%Y-%m-%d %H:%M:%S')})
    plt.bar(hours, revenues)
    plt.title("Hour")
    plt.ylabel("Revenue")
    plt.grid(True)
    plt.show()
Example #26
def avg_current(sums, current_day):
    days2, impressions2 = numpy.loadtxt("tempcsharp/all.csv", unpack=True,
                                        converters={ 0: mdates.strpdate2num('%Y-%m-%d')})
    impressions_log2 = []
    nr_users = 0
    for i in range(len(days2)):
        if days2[i]>=current_day:
            nr_users = sum(impressions2[0:i])
            #print nr_users
            break
    return nr_users
def loadDatetimeBidAskFromCSVdataFile(ifilename, skipfraction=0):
    print 'loading csv text data from file (', ifilename, ') into \
multidimensional array (stripping date to raw number format)'
    date, bid, ask = np.loadtxt(
        ifilename,
        delimiter=',',  # text data file delimiter
        converters={0: mdates.strpdate2num('%Y%m%d%H%M%S')},  # convert column 0 using matplotlib.dates "strip date to number" with the given format
        skiprows=int(skipfraction * countLinesInFile(ifilename)),
        unpack=True
        )
    return date, bid, ask
Example #28
def bytesdate2num(fmt,encoding = 'utf-8'):
    print fmt
    strconverter = mdates.strpdate2num(fmt)
    print strconverter
    def bytesconverter(b):
        print b
        s = b.decode(encoding)
        print s
        print(strconverter(s))
        return strconverter(s)
    return bytesconverter
def makeHistogram(filename, outname):
    days, scores = np.loadtxt(filename, unpack=True, delimiter=",",
                        converters={ 0: mdates.strpdate2num('%d-%b-%Y')})
    mask = reject_outliers(days)
    days = days[mask]
    scores = scores[mask]
    plt.plot_date(x=days, y=scores, marker='.')
    plt.xlabel("Patent Application Date")
    plt.ylabel("Jaccard Similarity (1.0 is all shared tags) ")
    plt.savefig(outname+'.png')
    plt.clf()
    plt.close()
Example #30
def graphRawFX():
    date,bid,ask = np.loadtxt('GBPUSD1d.txt', unpack=True,
                              delimiter=',',
                              converters={0: mdates.strpdate2num('%Y%m%d%H%M%S')})

    fig = plt.figure(figsize=(10, 7))
    ax1 = plt.subplot2grid((40, 40), (0, 0), rowspan=40, colspan=40)
    ax1.plot(date, bid)
    ax1.plot(date, ask)
    ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d %H:%M:%S'))
    plt.grid(True)
    plt.show()
Example #31
def graphRawGBP2USD1day():
    
    print 'start of graphRawGBP2USD1day()'
    
    print 'loading csv text data from file into multidimensional array (stripping date to raw number format)'
    
    date, bid, ask = np.loadtxt(
                                '/home/Slava/workspace/SentdexTutorial/src/AlgoTradingTutorial/GBPUSD1d.txt',    # file name                                
                                delimiter=',',  # text data file delimiter 
                                converters={0:mdates.strpdate2num('%Y%m%d%H%M%S')}, # convert column 0 using matplotlib.dates "strip date to number" with the given format
                                unpack=True
                                )
    
    print 'len(date) = ',len(date),' (number of data entries)'
    
    #fig = plt.figure(figsize=(10,7)) # unused variable 'fig' ?
    
    # this was the original command in the video. subplot2grid isn't recognized...
    ''' @bug here ''''''ax1 = plt.subplot2grid(40,40
                           (40,40),
                           (0,0),
                           rowspan=40, 
                           colspan=40)'''
      
    #ax1 = plt.subplot(2,1,1)    
    #ax1.plot(date,bid)
    
    #ax1 = plt.subplot(2,1,2) 
    #ax1.plot(date,ask)

    ax1 = plt.subplot(1,1,1)
    ax1.plot(date,bid,'b',date,ask,'r')
    
    plt.title('GBP to USD for May 1, 2013')
    plt.xlabel('time')
    plt.ylabel('price')

    print 'setting y-axis offset (intercept)'
    ''' @bug here ''' # plt.gca().get_yaxis().get_major_formatter().set_useOffset(False)
    #plt.gca().set_ybound(lower=0, upper=ax1.plot.get_ylim()[2])
    plt.subplots_adjust(bottom=.23)
    
    print 'setting x-axis labels format\n(since data is for a single day, the date part is somewhat of an overkill...)'
    ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d %H:%M:%S'))
    
    print 'setting x-axis labels rotation'
    for label in ax1.xaxis.get_ticklabels():
        label.set_rotation(45)
                    
    print 'twinning x-axis values with the previous plot' 
    ax1_2 = ax1.twinx()
    
    print 'creating plot for bid-ask spread and formatting it'
    ax1_2.fill_between(
                       date,            # x
                       0,               # y1 (from): x-axis
                       (ask-bid),       # y2 (to): bid-ask spread
                       facecolor='g',   # format: green
                       alpha=.3)        # format: alpha
    
    print 'setting grid'
    plt.grid(True)

    print 'showing plot'
    plt.show()
    
    print 'end of graphRawGBP2USD1day()'
def convert_date(date_bytes):
    return mdates.strpdate2num('%m/%d/%Y %H:%M')(date_bytes.decode('ascii'))
# We get the dataset URI that we are interested in
dataSetURIs = pyGDP.getDataSetURI()
for d in dataSetURIs:
    print d

# Set our datasetURI
dataSetURI = 'dods://cida.usgs.gov/qa/thredds/dodsC/prism'
# Get the available data types associated with the dataset
dataType = 'ppt'
# Get available time range on the dataset
timeRange = pyGDP.getTimeRange(dataSetURI, dataType)
for t in timeRange:
    print t
timeBegin = '1900-01-01T00:00:00.000Z'
timeEnd = '2011-11-01T00:00:00.000Z'
print

textFile = pyGDP.submitFeatureWeightedGridStatistics(OKshapefile, dataSetURI,
                                                     dataType, timeBegin,
                                                     timeEnd, usr_attribute,
                                                     usr_value)

jd, precip = np.loadtxt(
    textFile,
    unpack=True,
    skiprows=3,
    delimiter=',',
    converters={0: mdates.strpdate2num('%Y-%m-%dT%H:%M:%SZ')})

print 'Some data:'
print precip[0:100]
Example #34
sql = "SELECT * FROM Ester ORDER BY read_time limit 1440"

#plot run for Ester outside
graphArray = []
for row in c.execute(sql):
    startingInfo = str(row).replace(')', '').replace('(', '').replace(
        'u\'', '').replace("'", "")
    splitInfo = startingInfo.split(',')
    graphArrayAppend = splitInfo[1] + ',' + splitInfo[4]
    graphArray.append(graphArrayAppend)

read_time, value = np.loadtxt(
    graphArray,
    delimiter=',',
    unpack=True,
    converters={0: mdates.strpdate2num(' %Y-%m-%d %H:%M:%S')})
outside, = plt.plot_date(x=read_time,
                         y=value,
                         fmt='mD-',
                         markersize=2,
                         label='Outdoor Humidity',
                         linewidth=2)

#new sql query for office floor temp
conn = sqlite3.connect('wormbin.db')
c = conn.cursor()
wordUsed = 'read_time'
sql = "SELECT * FROM Heidi ORDER BY read_time limit 1440"

#plot run for Heidi inside
graphArray = []
Example #35
def graph_data(stock):
    # clump same logic together
    # part 1
    fig = plt.figure()
    ax1 = plt.subplot2grid([1, 1], [0, 0])

    # part 2
    stock_price_url = 'http://chartapi.finance.yahoo.com/instrument/1.0/'\
        +stock+ '/chartdata;type=quote;range=10y/csv'

    source_code = urllib.urlopen(stock_price_url).read().decode()
    stock_data = []
    split_source = source_code.split('\n')

    for line in split_source:
        split_line = line.split(',')
        if len(split_line) == 6:
            if 'values' not in line:
                stock_data.append(line)

    date, closep, highp, lowp, openp, volume = np.loadtxt(
        stock_data,
        delimiter=',',
        unpack=True,
        converters={0: mdates.strpdate2num('%Y%m%d')})
    ax1.plot(date, closep)
    ax1.plot(date, openp)
    x = 0
    y = len(date)
    ohlc = []

    while x < y:
        data_join = date[x], openp[x], highp[x], lowp[x], closep[x], volume[x]
        ohlc.append(data_join)
        x += 1

    for label in ax1.xaxis.get_ticklabels():
        label.set_rotation(45)

    ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
    ax1.xaxis.set_major_locator(mticker.MaxNLocator(12))
    ax1.grid(True)

    # Annotation example
    # bbox_props = dict(boxstyle='round', fc='w', ec='k', lw=1)
    bbox_props = dict(boxstyle='larrow', fc='w', ec='k', lw=1)
    ax1.annotate(str(closep[-1]), (date[-1], closep[-1]),
                 xytext=(date[-1] + 100, closep[-1]),
                 bbox=bbox_props)

    # part 3
    plt.xlabel('date')
    plt.ylabel('price')
    plt.title('Interesting Graph')
    plt.legend()
    plt.subplots_adjust(left=0.09,
                        bottom=0.18,
                        right=0.94,
                        top=0.93,
                        wspace=0.2,
                        hspace=0)
    plt.show()
def graphData(stock, MA1, MA2, period, numdays):
    fig.clf()
    candleWidth = .05
    try:
        try:
            print 'pulling data on', stock

            now = time.time()
            then = time.time() - (numdays * 86400)

            elPeriod = 0
            if period == 5:
                elPeriod = 300
                candleWidth = .001
            elif period == 15:
                elPeriod = 900
                candleWidth = .005
            elif period == 30:
                elPeriod = 1800
                candleWidth = .01
            elif period == 2:
                elPeriod = 7200
                candleWidth = .03
            elif period == 4:
                elPeriod = 14400
                candleWidth = .07
            elif period == 24:
                elPeriod = 86400
                candleWidth = .5
            else:
                elPeriod = 86400
                candleWidth = .5

            urlToVisit = 'https://poloniex.com/public?command=returnChartData&currencyPair=BTC_' + stock + '&start=' + str(
                int(then)) + '&end=9999999999&period=' + str(elPeriod)

            stockFile = []
            try:
                sourceCode = urllib2.urlopen(urlToVisit)
                json_object = json.loads(sourceCode.read())
                for d in json_object:

                    fixedDate = str(
                        datetime.datetime.fromtimestamp(int(
                            d['date'])).strftime('%Y-%m-%d %H:%M:%S'))
                    tclose = d['close']
                    thigh = d['high']
                    tlow = d['low']
                    topen = d['open']
                    tvolume = d['volume']
                    try:
                        theAppendLine = '{0},{1},{2},{3},{4},{5},'.format(
                            fixedDate, tclose, thigh, tlow, topen, tvolume)

                        stockFile.append(theAppendLine)
                    except Exception, e:
                        print str(e)

            except Exception, e:
                print str(e), 'failed to organize data'

        except Exception, e:
            print str(e), 'failed to pull price data'

        date, closep, highp, lowp, openp, volume = np.loadtxt(
            stockFile,
            delimiter=',',
            unpack=True,
            usecols=range(6),
            converters={0: mdates.strpdate2num('%Y-%m-%d %H:%M:%S')})

        x = 0
        y = len(date)
        candleAr = []

        while x < y:
            appendLine = date[x], openp[x], closep[x], highp[x], lowp[
                x], volume[x]
            candleAr.append(appendLine)
            x += 1

        Av1 = movingaverage(closep, MA1)
        Av2 = movingaverage(closep, MA2)

        SP = len(date[MA2 - 1:])

        label1 = str(MA1) + ' SMA'
        label2 = str(MA2) + ' SMA'

        ax1 = plt.subplot2grid((7, 4), (1, 0),
                               rowspan=4,
                               colspan=4,
                               axisbg='#07000d')
        _candlestick(ax1,
                     candleAr[-SP:],
                     width=candleWidth,
                     colorup='#53c156',
                     colordown='#ff1717')

        ax1.plot(date[-SP:], Av1[-SP:], '#e1edf9', label=label1, linewidth=1.5)
        ax1.plot(date[-SP:], Av2[-SP:], '#4ee6fd', label=label2, linewidth=1.5)

        ax1.grid(True, color='w')
        ax1.xaxis.set_major_locator(mticker.MaxNLocator(10))
        ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
        plt.gca().yaxis.set_major_locator(mticker.MaxNLocator(prune='upper'))
        ax1.yaxis.label.set_color('w')
        ax1.spines['bottom'].set_color('#5998ff')
        ax1.spines['top'].set_color('#5998ff')
        ax1.spines['left'].set_color('#5998ff')
        ax1.spines['right'].set_color('#5998ff')
        ax1.tick_params(axis='y', colors='w')
        ax1.tick_params(axis='x', colors='w')
        plt.ylabel('Stock Price and Volume')

        maLeg = plt.legend(loc=9,
                           ncol=2,
                           prop={'size': 7},
                           fancybox=True,
                           borderaxespad=0.)
        maLeg.get_frame().set_alpha(0.4)
        textEd = pylab.gca().get_legend().get_texts()
        pylab.setp(textEd[0:5], color='w')

        ax0 = plt.subplot2grid((7, 4), (0, 0),
                               sharex=ax1,
                               rowspan=1,
                               colspan=4,
                               axisbg='#07000d')

        rsi = rsiFunc(closep)
        rsiCol = '#c1f9f7'
        posCol = '#386d13'
        negCol = '#8f2020'
        ax0.plot(date[-SP:], rsi[-SP:], rsiCol, linewidth=1.5)
        ax0.axhline(70, color=negCol)
        ax0.axhline(30, color=posCol)
        ax0.fill_between(date[-SP:],
                         rsi[-SP:],
                         70,
                         where=(rsi[-SP:] >= 70),
                         facecolor=negCol,
                         edgecolor=negCol)
        ax0.fill_between(date[-SP:],
                         rsi[-SP:],
                         30,
                         where=(rsi[-SP:] <= 30),
                         facecolor=posCol,
                         edgecolor=posCol)
        ax0.set_ylim(0, 100)

        ax0.spines['bottom'].set_color('#5998ff')
        ax0.spines['top'].set_color('#5998ff')
        ax0.spines['left'].set_color('#5998ff')
        ax0.spines['right'].set_color('#5998ff')
        ax0.text(0.015,
                 0.95,
                 'RSI (14)',
                 va='top',
                 color='w',
                 transform=ax0.transAxes)
        ax0.tick_params(axis='y', colors='w')
        ax0.tick_params(axis='x', colors='w')
        ax0.set_yticks([30, 70])

        volumeMin = 0

        ax1v = ax1.twinx()
        ax1v.fill_between(date[-SP:],
                          volumeMin,
                          volume[-SP:],
                          facecolor='#00ffe8',
                          alpha=.5)
        ax1v.axes.yaxis.set_ticklabels([])
        ax1v.grid(False)
        ax1v.spines['bottom'].set_color('#5998ff')
        ax1v.spines['top'].set_color('#5998ff')
        ax1v.spines['left'].set_color('#5998ff')
        ax1v.spines['right'].set_color('#5998ff')
        ax1v.set_ylim(0, 2 * volume.max())

        ax2 = plt.subplot2grid((7, 4), (5, 0),
                               sharex=ax1,
                               rowspan=1,
                               colspan=4,
                               axisbg='#07000d')
        fillcolor = '#00ffe8'
        nslow = 26
        nfast = 12
        nema = 9

        emaslow, emafast, macd = computeMACD(closep)
        ema9 = ExpMovingAverage(macd, nema)

        ax2.plot(date[-SP:], macd[-SP:], color='#4ee6fd', lw=2)
        ax2.plot(date[-SP:], ema9[-SP:], color='#e1edf9', lw=1)
        ax2.fill_between(date[-SP:],
                         macd[-SP:] - ema9[-SP:],
                         0,
                         alpha=0.5,
                         facecolor=fillcolor,
                         edgecolor=fillcolor)
        ax2.text(0.015,
                 0.95,
                 'MACD 12,26,9',
                 va='top',
                 color='w',
                 transform=ax2.transAxes)

        ax2.spines['bottom'].set_color('#5998ff')
        ax2.spines['top'].set_color('#5998ff')
        ax2.spines['left'].set_color('#5998ff')
        ax2.spines['right'].set_color('#5998ff')
        ax2.tick_params(axis='y', colors='w')
        ax2.tick_params(axis='x', colors='w')
        ax2.yaxis.set_major_locator(mticker.MaxNLocator(nbins=5,
                                                        prune='upper'))
        plt.ylabel('MACD', color='w')

        ax3 = plt.subplot2grid((7, 4), (6, 0),
                               sharex=ax1,
                               rowspan=1,
                               colspan=4,
                               axisbg='#07000d')
        rvi, rviSig = rviFunc(closep, openp, highp, lowp, volume)
        rviCol = '#c1f9f7'
        ax3.plot(date[-SP:], rvi[-SP:], rviCol, linewidth=1.5)
        ax3.plot(date[-SP:], rviSig[-SP:], '#5998ff', linewidth=1.5)

        ax3.spines['bottom'].set_color('#5998ff')
        ax3.spines['top'].set_color('#5998ff')
        ax3.spines['left'].set_color('#5998ff')
        ax3.spines['right'].set_color('#5998ff')
        ax3.text(0.015,
                 0.95,
                 'RVI (10)',
                 va='top',
                 color='w',
                 transform=ax3.transAxes)
        ax3.tick_params(axis='y', colors='w')
        ax3.tick_params(axis='x', colors='w')

        for label in ax3.xaxis.get_ticklabels():
            label.set_rotation(45)

        plt.subplots_adjust(left=.09,
                            bottom=.14,
                            right=.94,
                            top=.95,
                            wspace=.20,
                            hspace=0)

        plt.suptitle(stock, color='w')

        plt.setp(ax0.get_xticklabels(), visible=False)
        plt.setp(ax1.get_xticklabels(), visible=False)
        plt.setp(ax2.get_xticklabels(), visible=False)
Example #37
pitch, \
yaw, \
speed, \
course, \
latitude, \
longitude, \
altitude, \
pdop, \
hdop, \
vdop, \
epe, \
fix, \
satellites_view, \
satellites_used, \
temp = np.loadtxt(datafile, delimiter=',', unpack=True,
                  converters={1: mdates.strpdate2num('%H%M%S%f'),
                              0: mdates.strpdate2num('%y%m%d')},
                  skiprows=1)

print('Read \'%s\' successfully.' % datafile)

# A course of 0° means the car is traveling northbound
# and 90° means it is traveling eastbound.
# In the calculation that follows, East is 0° and North is 90°,
# so we need an offset.
course = (-course + 90.0)
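# Worked check of the offset (added note, not part of the original script): a GPS course of
# 0° (north) maps to -0 + 90 = 90° in the math convention used below, and a course of
# 90° (east) maps to -90 + 90 = 0°, matching the East-is-zero convention described above.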

# <headingcell level=2>

# Measurement Function H
Example #38
    def cluster_power(self, fnames, nps, u_nps):
        t_pwr = []
        pwr = []
        u_pwr = []
        t_det = []
        det = []
        levels = []
        # plot the average power
        plot = ahp.ah2d()
        colors = ['#E3AE24', '#B63F97', '#5C6F7B', '#5C8727', '#7E543A', '#A3792C', '#7299C6', '#B8B308']
        for fname in fnames:
            # check if an alex fileset or a brian file
            if "pwr" in fname:
                # open power file
                t_pwr_new, pwr_new, u_pwr_new = \
                    np.loadtxt(expanduser("~") + '/data/tena/' + fname + '.dat',
                               delimiter=",", dtype=object,
                               converters={0: strpdate2num("%d-%b-%Y %H:%M:%S"),
                                           1: float, 2: float}, unpack=True)
                t_pwr_new = np.array(t_pwr_new) * 3600. * 24.
                t_det_new, det_new, _ = \
                    np.loadtxt(expanduser("~") + "/data/tena/" + fname.replace("pwr", "det") + '.dat',
                               delimiter=",", dtype=object,
                               converters={0: strpdate2num("%d-%b-%Y %H:%M:%S"),
                                           1: float, 2: float}, unpack=True)
                t_det_new = np.array(t_det_new) * 3600. * 24.
                eps = 0.1
                min_samples = 10
            else:
                # get the date from the filename
                # find string matching %i_min_%2i_%2i_%2i[_%i] at end of filename
                # get the count interval from the filename
                # find string matching %i_min at end of string
                pattern = \
                    ".*_([0-9]*)_min_([0-9]*)_([0-9]*)_([0-9]*)(?:_([0-9]*))?$"
                result = re.split(pattern, fname)
                mm = result[2]
                dd = result[3]
                yy = result[4]
                t_int = float(result[1]) * 60.
                epoch = datetime.utcfromtimestamp(0)
                t_start = (datetime.strptime(
                    mm + "/" + dd + "/" + yy + " 01:00:00",
                    "%m/%d/%y %H:%M:%S") - epoch).total_seconds()
                pwr_new, eff_new, u_eff_new = \
                    np.loadtxt(expanduser("~") + '/data/tena/' + fname + '.dat',
                               delimiter=",", dtype=object,
                               converters={0: float, 1: float, 2: float},
                               unpack=True)
                u_pwr_new = 0.2 * np.ones_like(pwr_new)
                det_new = nps * np.array(eff_new)
                u_eff_new = u_nps * np.array(u_eff_new)
                t = t_start
                t_pwr_new = []
                for i in range(len(pwr_new)):
                    t_pwr_new = np.append(t_pwr_new, t)
                    t += t_int
                t_det_new = t_pwr_new.copy()
                eps = 0.24
                min_samples = 2

            # put the data in row form
            pwr_data = \
                [[float(t_pwr_new[i]), float(pwr_new[i])] for i in range(len(pwr_new))]

            # use the dbscan clustering algorithm
            X = StandardScaler().fit_transform(pwr_data)
            db = DBSCAN(eps=eps, min_samples=min_samples).fit(X)
            core_samples_mask = np.zeros_like(db.labels_, dtype=bool)
            core_samples_mask[db.core_sample_indices_] = True
            labels = db.labels_

            # Number of clusters in labels, ignoring noise if present.
            n_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)
            marked = np.zeros_like(t_pwr_new)
            for i in range(len(t_pwr_new))[::-1]:
                label = labels[i]
                other_labels = [x for x in np.unique(labels) if x != label]
                for other in other_labels:
                    if (other >= 0) and inrange(t_pwr_new[i], t_pwr_new[labels == other]):
                        marked[i] = 1
            labels[marked == 1] = -1
            for i in [x for x in np.unique(labels) if x != -1]:
                if len(pwr_new[labels == i]) >= min_samples:
                    level = power_level(t_pwr_new[labels == i], pwr_new[labels == i],
                                        t_det_new, det_new)
                    levels = np.append(levels, level)
                    t_det = np.append(t_det, t_det_new)
                    t_pwr = np.append(t_pwr, t_pwr_new)
                    pwr = np.append(pwr, pwr_new)
                    u_pwr = np.append(u_pwr, u_pwr_new)
                    det = np.append(det, det_new)

        return levels
Example #39
def convert_date(date_bytes):
    return mdates.strpdate2num('%m/%d/%Y')(date_bytes.decode('ascii'))  # the 'ascii' argument can also be omitted. %Y:2014, %y:14    %m:12, %b:Dec    %d:28    %H:13, %I:01    %M:49    %S:30
Example #40
def process(inps):
    '''
    Do the actual processing.
    '''
    nSar = len(inps.fnames)
    print(inps.fnames)
    print('Number of SAR Scenes = %d' % nSar)

    Orbits = []
    print('Reading in all the raw files and metadata.')
    for k in xrange(nSar):
        if inps.raw:
            sar = createSensor('COSMO_SKYMED')
        else:
            sar = createSensor('COSMO_SKYMED_SLC')
            sar.hdf5 = inps.fnames[k]
            fd = CSKmetadata(sar)
            Orbits.append(orbit_info(sar, inps.fnames[k], fd[0]))

    ##########We now have all the pegpoints to start processing.
    Dopplers = np.zeros(nSar)
    Bperp = np.zeros(nSar)
    Days = np.zeros(nSar)

    #######Setting the first scene as temporary reference.
    master = Orbits[0]

    Dopplers[0] = master.fd
    Days[0] = master.dt.toordinal()
    for k in xrange(1, nSar):
        slave = Orbits[k]
        Bperp[k] = master.getBaseline(slave)
        Dopplers[k] = slave.fd
        Days[k] = slave.dt.toordinal()

    print("************************************")
    print("Index    Date       Bperp  Doppler")
    print("************************************")

    ### Plot
    if inps.plot:
        f = open("baseline.txt", 'w')
        g = open("bplot.txt", 'w')
        f.write("Index     Date       Bperp   Doppler \n")

    for k in xrange(nSar):
        print('{0:>3}    {1:>10} {2:4.2f}  {3:4.2f}'.format(
            k + 1, Orbits[k].dt.strftime('%Y-%m-%d'), Bperp[k], Dopplers[k]))

    ### Plot
    if inps.plot:
        f.write('{0:>3}    {1:>10}    {2:4.2f}     {3:4.2f} \n'.format(
            k + 1, Orbits[k].dt.strftime('%Y-%m-%d'), Bperp[k], Dopplers[k]))
        g.write('{0:>10}    {1:4.2f} \n'.format(
            Orbits[k].dt.strftime('%Y-%m-%d'), Bperp[k]))

    #### Looking at all possible pairs. Stop here if you just want to add
    ### 1 scene. If the first scene is the new scene, you have all reqd
    ### information at this stage.

    print("************************************")

    ### Plot
    if inps.plot:
        f.close()
        g.close()
        os.system('mkdir baselineInfos')
        os.system('mv baseline.txt bplot.txt baselineInfos')

    geomRho = (
        1 -
        np.clip(np.abs(Bperp[:, None] - Bperp[None, :]) / inps.Bcrit, 0., 1.))
    tempRho = np.exp(-1.0 * np.abs(Days[:, None] - Days[None, :]) / inps.Tau)
    dopRho = (np.abs(Dopplers[:, None] - Dopplers[None, :]) /
              master.prf) < inps.dop

    Rho = geomRho * tempRho * dopRho
    for kk in xrange(nSar):
        Rho[kk, kk] = 0.

    avgRho = np.mean(Rho, axis=1) * nSar / (nSar - 1)
    numViable = np.sum((Rho > inps.cThresh), axis=1)

    ####Currently sorting on average coherence.

    masterChoice = np.argsort(-avgRho)  #Descending order
    masterOrbit = Orbits[masterChoice[0]]
    masterBperp = Bperp[masterChoice[0]]

    print('*************************************')
    print('Ranking for Master Scene Selection: ')
    print('**************************************')
    print('Rank  Index      Date    nViable   Avg. Coh.')
    for kk in xrange(nSar):
        ind = masterChoice[kk]
        print('%03d   %03d   %10s  %03d  %02.3f' %
              (kk + 1, ind + 1, Orbits[ind].dt.strftime('%Y-%m-%d'),
               numViable[ind], avgRho[ind]))

    print('***************************************')

    print('***************************************')
    print('List of Viable interferograms:')
    print('***************************************')

    [ii, jj] = np.where(Rho > inps.cThresh)

    pairList = []
    print('Master     Slave      Bperp      Deltat')
    if inps.plot:
        os.system('rm baselineInfos/InSAR_pairs.txt')
        f = open("baselineInfos/InSAR_pairs.txt", 'w')
        f.write(
            'Master     Slave      Bperp(m)      Deltat(days)     Doppler(Hz) \n'
        )
        f.close()
        os.system('rm baselineInfos/InSAR_plot.txt')
        g = open("baselineInfos/InSAR_plot.txt", 'w')
        g.close()

    for mind, sind in itertools.izip(ii, jj):
        master = Orbits[mind]
        slave = Orbits[sind]

    #Plot
    giorni = []
    BaseList = []

    if master.dt > slave.dt:
        print('{0:>10} {1:>10}  {2:>4.2f}   {3:>4.2f}'.format(
            master.dt.strftime('%Y-%m-%d'), slave.dt.strftime('%Y-%m-%d'),
            Bperp[mind] - Bperp[sind], Days[mind] - Days[sind]))
        pairList.append([
            master.dt.strftime('%Y%m%d'),
            slave.dt.strftime('%Y%m%d'), Bperp[mind] - Bperp[sind]
        ])

    if inps.plot:
        #f=open("InSAR_plot.txt",'w')
        if master.dt > slave.dt:
            f = open("baselineInfos/InSAR_pairs.txt", 'a')
            f.write(
                '{0:>10} {1:>10}  {2:>4.2f}        {3:>4.2f}            {4:>4.2f} \n'
                .format(master.dt.strftime('%Y-%m-%d'),
                        slave.dt.strftime('%Y-%m-%d'),
                        Bperp[mind] - Bperp[sind], Days[mind] - Days[sind],
                        Dopplers[mind] - Dopplers[sind]))
            f.close()
            g = open("baselineInfos/InSAR_plot.txt", 'a')
            g.write(
                '{0:>10} {1:>10}  {2:>4.2f}   {3:>4.2f}       {4:>4.2f}   {5:>4.2f} \n'
                .format(master.dt.strftime('%Y-%m-%d'),
                        slave.dt.strftime('%Y-%m-%d'), Bperp[mind],
                        Bperp[sind], Dopplers[mind], Dopplers[sind]))
            plt.plot_date([Days[mind], Days[sind]], [Bperp[mind], Bperp[sind]],
                          'r-',
                          lw=1,
                          xdate=True,
                          ydate=False)

            #f=open("InSAR_plot.txt",'a')
            #f.write('{2:>4.2f} {2:>4.2f}     {3:>4.2f} {3:>4.2f} \n'.format(Bperp[mind], Bperp[sind], Days[mind], Days[sind]))
            #f.close()
            #print(Bperp[mind], Days[mind], Bperp[sind] , Days[sind])

    print('***************************************')

    #######Currently picks master peg point.
    print('***************************************')
    commonPeg = masterOrbit.peg
    print('Common peg point:                      ')
    print(commonPeg)
    print('Bperp Range:  [%f , %f] ' %
          (Bperp.min() - masterBperp, Bperp.max() - masterBperp))

    ######Choose median doppler
    commonDop = np.median(Dopplers)
    maxDop = np.max(Dopplers)
    minDop = np.min(Dopplers)
    varDop = np.max(np.abs(Dopplers - commonDop)) / masterOrbit.prf

    print('Common Doppler: ', commonDop)
    print('Doppler Range:  [%f, %f]' % (minDop, maxDop))
    print('Max Doppler Variation = %f %%' % (varDop * 100))
    print('******************************************')

    ### Plot
    if inps.plot:
        days, bperp = np.loadtxt(
            "baselineInfos/bplot.txt",
            unpack=True,
            converters={0: mdates.strpdate2num('%Y-%m-%d')})
        plt.plot_date(x=days, y=bperp, xdate=True, ydate=False)
        date_span = 0.2 * np.abs(days.max() - days.min())
        bperp_span = 0.2 * np.abs(bperp.max() - bperp.min())
        plt.grid(True)
        plt.ylabel("Perpendicular Baseline (meters)")
        plt.xlabel("Time")
        plt.xlim([days.min() - date_span, days.max() + date_span])
        plt.ylim([bperp.min() - bperp_span, bperp.max() + bperp_span])
        plt.axes().set_aspect('auto', 'datalim')
        plt.savefig('baselineInfos/baseline.png')
        plt.show()

    return pairList
def bytespdate2num(fmt, encoding='utf-8'):
    str_converter = mdates.strpdate2num(fmt)
    def bytes_converter(b):
        s = b.decode(encoding)
        return str_converter(s)
    return bytes_converter
Example #42
def date2num(date):
    converter = mdates.strpdate2num('%Y-%m-%d')
    return converter(date)
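# Usage note (added): the returned value is a plain matplotlib float date number, e.g.
#     x = date2num('2014-12-28')
#     plt.plot_date([x], [1.0], 'o')
# which is the same representation plot_date and mdates.DateFormatter operate on.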
Example #43
def bytespdata2num(fmt):
    strconverter = mdates.strpdate2num(fmt)
    return strconverter
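# Note (added): despite its name, this variant returns the strpdate2num converter unchanged
# and never decodes bytes, so it only works where loadtxt already supplies str values
# (e.g. Python 2); contrast with the decoding wrappers in the earlier examples.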
Example #44
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.ticker as mticker
import matplotlib.dates as mdates
import numpy as np
from numpy import loadtxt
import time

totalStart = time.time()

date, bid, ask = np.loadtxt(
    'GBPUSD1d.txt',
    unpack=True,
    delimiter=',',
    converters={0: mdates.strpdate2num('%Y%m%d%H%M%S')})
avgLine = ((bid + ask) / 2)

####DEFINE######
#CHANGE#
patternAr = []
performanceAr = []
patForRec = []


def percentChange(startPoint, currentPoint):
    try:
        x = ((float(currentPoint) - startPoint) / abs(startPoint)) * 100.00
        if x == 0.0:
            return 0.000000001
        else:
            return x
    except:
        # fall back to a tiny non-zero value (e.g. when startPoint is 0)
        return 0.000000001
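# Usage sketch (added): percentChange(10.0, 11.0) == ((11.0 - 10.0) / 10.0) * 100 == 10.0,
# i.e. the percent move from the start point, with a tiny non-zero sentinel returned in
# place of an exact 0.0.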
Example #45
    'washingtondc': 'darkgrey',
    'denver' : 'green',
    'sfbay' : 'red',
    'raleigh':'dimgray',
    'atlanta':'yellow',
    'seattle':'brown',
    'portland':'orange'
}


dt = dtype({'names'   : ['city', 'date', 'count'],
            'formats' : ['S100', int, int]})

city,dates,counts = loadtxt('pulls_per_date.csv', 
                           dtype=dt,
                           converters={1:mdates.strpdate2num('%Y-%m-%d')}, 
                           unpack=True,
                           comments = '#');


#cities=set(city)
#print cities

#for c in ['atlanta']: 
##    #fig,ax=plt.subplots()
#    tt=(city==c)
#    #plt.plot_date(date[tt],counts[tt],'-')
#    m,s,b=plt.stem(dates[tt],counts[tt], markerfmt=" ")
#    plt.setp(s,'color','blue','linewidth',2)
#    plt.gcf().autofmt_xdate() #Makes the dates fit, turns at angle
#    plt.gca().xaxis.set_major_formatter( matplotlib.dates.DateFormatter('%b %Y') )
Example #46
def graph_data(stock_file):
    stock_reader = StockFileReader(stock_file)
    stock_reader.open()

    dates = []
    openp = []
    highp = []
    lowp = []
    closep = []
    volume = []
    sma1 = []
    sma2 = []

    sma1_indicator = SmaIndicator(12)
    sma2_indicator = SmaIndicator(26)

    dateconverter = mdates.strpdate2num("%Y%m%d")
    for row in stock_reader.read_all():
        dates.append(dateconverter(row.timestamp.to_string("%Y%m%d")))
        openp.append(row.open)
        highp.append(row.high)
        lowp.append(row.low)
        closep.append(row.adjusted_close)
        volume.append(row.volume)
        sma1_value = sma1_indicator.calculate(row.timestamp, row.close)
        if sma1_value is not None:
            sma1.append(sma1_value)

        sma2_value = sma2_indicator.calculate(row.timestamp, row.close)
        if sma2_value is not None:
            sma2.append(sma2_value)

    stock_reader.close()

    x = 0
    y = len(dates)
    candles = []
    while x < y:
        append_line = dates[x], openp[x], highp[x], lowp[x], closep[x], volume[
            x]
        candles.append((append_line))
        x += 1

    fig = plt.figure(facecolor='#07000d')

    ax0 = plt.subplot2grid((5, 4), (0, 0),
                           rowspan=1,
                           colspan=4,
                           facecolor='#07000d')
    ax0.spines['top'].set_color('#5998ff')
    ax0.spines['bottom'].set_color('#5998ff')
    ax0.spines['left'].set_color('#5998ff')
    ax0.spines['right'].set_color('#5998ff')
    ax0.spines['top'].set_color('#5998ff')
    ax0.tick_params(axis='x', colors='w')
    ax0.tick_params(axis='y', colors='w')
    plt.gca().yaxis.set_major_locator(mticker.MaxNLocator(prune='lower'))
    plt.ylabel('RSI', color='w')

    ax1 = plt.subplot2grid((5, 4), (1, 0),
                           rowspan=4,
                           colspan=4,
                           facecolor='#07000d')
    candlestick_ohlc(ax1,
                     candles,
                     width=0.75,
                     colorup='#9eff15',
                     colordown='#ff1717',
                     alpha=0.75)

    ax1.plot(dates[sma1_indicator.period - 1:],
             sma1,
             label=sma1_indicator.label)
    ax1.plot(dates[sma2_indicator.period - 1:],
             sma2,
             label=sma2_indicator.label)

    #ax1.grid(True, color='w')
    ax1.xaxis.set_major_locator(mticker.MaxNLocator(10))
    ax1.xaxis.set_major_formatter(mdates.DateFormatter("%Y-%m-%d"))
    #ax1.yaxis.label.set_color('w')
    ax1.spines['top'].set_color('#5998ff')
    ax1.spines['bottom'].set_color('#5998ff')
    ax1.spines['left'].set_color('#5998ff')
    ax1.spines['right'].set_color('#5998ff')
    ax1.spines['top'].set_color('#5998ff')
    ax1.tick_params(axis='y', colors='w')
    ax1.tick_params(axis='x', colors='w')

    for label in ax1.xaxis.get_ticklabels():
        label.set_rotation(45)

    plt.ylabel('Stock price', color='w')
    plt.legend(loc=3, prop={'size': 7}, fancybox=True, borderaxespad=0)
    '''
    ax2 = plt.subplot2grid((5, 4), (4, 0), sharex=ax1, rowspan=1, colspan=4, facecolor='#07000d')
    ax2.plot(dates, volume, '#00ffe8', linewidth=.8)
    ax2.fill_between(dates, 0, volume, facecolor='#00ffe8', alpha=.5)
    ax2.axes.yaxis.set_ticklabels([])
    ax2.spines['top'].set_color('#5998ff')
    ax2.spines['bottom'].set_color('#5998ff')
    ax2.spines['left'].set_color('#5998ff')
    ax2.spines['right'].set_color('#5998ff')
    ax2.spines['top'].set_color('#5998ff')
    ax2.tick_params(axis='x', colors='w')
    ax2.tick_params(axis='y', colors='w')
    for label in ax2.xaxis.get_ticklabels():
        label.set_rotation(45)
    plt.ylabel('Volume', color='w')
    '''

    ax1v = ax1.twinx()
    ax1v.fill_between(dates, 0, volume, facecolor='#00ffe8', alpha=.5)
    ax1v.axes.yaxis.set_ticklabels([])
    ax1v.spines['top'].set_color('#5998ff')
    ax1v.spines['bottom'].set_color('#5998ff')
    ax1v.spines['left'].set_color('#5998ff')
    ax1v.spines['right'].set_color('#5998ff')
    ax1v.spines['top'].set_color('#5998ff')
    ax1v.set_ylim(0, 2 * max(volume))
    ax1v.tick_params(axis='x', colors='w')
    ax1v.tick_params(axis='y', colors='w')

    plt.subplots_adjust(left=.10, bottom=.20, top=.90, wspace=.20, hspace=0)

    plt.xlabel('Date', color='w')

    plt.suptitle(stock_file + ' Stock Price', color='w')

    plt.setp(ax0.get_xticklabels(), visible=False)

    plt.show()