Ejemplo n.º 1
0
# Pad the space on the top to allow for an annotation below suptitle.
# NOTE(review): this fragment assumes `fig`, `ax`, `ax2`, `plt`, `date_str`,
# `percent_infected`, `usa_cases`, `usa_deaths`, `usa_daily` and
# `DateFormatter` are defined earlier in the file -- confirm against the
# surrounding code.
fig.suptitle('Daily Reported Cases and Deaths in the US*', fontsize=22)
fig.subplots_adjust(top=0.9)
# Footnote annotation, positioned in absolute pixel coordinates of the axes.
plt.annotate(f"*Percentage approximate. Updated on {date_str}", (175, 400),
             fontsize=12,
             xycoords='axes pixels')

# Annotate with the total number of cases and deaths.
ax.annotate(
    'Percent Infected:  {:.2f}%\nTotal Cases:  {:,}\nTotal Deaths: {:,}'.
    format(percent_infected, usa_cases, usa_deaths), (10, 260),
    fontsize=14,
    xycoords='axes pixels')

# Format the x-axis dates as abbreviated month names (e.g. "Jan").
date_form = DateFormatter("%b")
ax.xaxis.set_major_formatter(date_form)

# Color the secondary axis tick labels to match its line, and scale both
# y-axes to the highest observed value plus ~10% headroom.
highest_cases, highest_deaths = max(usa_daily.new_cases), max(
    usa_daily.new_deaths)
ax2.tick_params(axis='y', labelcolor='purple')
ax.set_ylim([0, highest_cases + highest_cases // 10])
ax2.set_ylim([0, highest_deaths + highest_deaths // 10])

# Format the legend and grid; the second legend is nudged down via
# bbox_to_anchor so the two legends do not overlap in the upper-left corner.
ax.legend(loc='upper left')
ax2.legend(loc='upper left', bbox_to_anchor=(0, 0.92))
ax2.grid(False)
if __name__ == '__main__':
Ejemplo n.º 2
0
#!/usr/bin/env python
# Candlestick chart demo: fetch intraday AAPL quotes from DataCenter,
# aggregate them into daily OHLC bars and plot them with matplotlib.
import matplotlib.pyplot as plt
from matplotlib.dates import DateFormatter, WeekdayLocator, DayLocator, MONDAY, date2num
from matplotlib.finance import quotes_historical_yahoo_ohlc, candlestick_ohlc, candlestick2_ohlc
# Bug fix: import from the public `datetime` module, not `_datetime`, which
# is CPython's private C accelerator module (an implementation detail).
from datetime import datetime, timedelta
from dataCenter import DataCenter

# (Year, month, day) tuples suffice as args for quotes_historical_yahoo
date1 = (2004, 2, 1)
date2 = (2004, 4, 12)

mondays = WeekdayLocator(MONDAY)  # major ticks on the mondays
alldays = DayLocator()  # minor ticks on the days
weekFormatter = DateFormatter('%b %d')  # e.g., Jan 12
dayFormatter = DateFormatter('%d')  # e.g., 12

# quotes = quotes_historical_yahoo_ohlc('INTC', date1, date2)

# Fetch AAPL intraday data, aggregate to one bar per day, and convert each
# record's leading timestamp to a matplotlib date number for plotting.
date1 = datetime(2016, 12, 1)
date2 = datetime(2016, 12, 19)
data = DataCenter.get_intraday_data('AAPL', date1, date2, 'IntradayGoogle')
dataDay = DataCenter.aggregate_intraday_data(data, timedelta(days=1))
quotes = [[date2num(rec[0])] + rec[1:] for rec in dataDay]

# Nothing to plot if the data source returned no records.
if len(quotes) == 0:
    raise SystemExit

fig, ax = plt.subplots()
fig.subplots_adjust(bottom=0.2)
ax.xaxis.set_major_locator(mondays)
ax.xaxis.set_minor_locator(alldays)
Ejemplo n.º 3
0
def main():
	"""Test/demo driver (Python 2).

	Fetches OHLC stock data, finds local extrema and candidate price
	"tunnels", then plots each tunnel on the candlestick chart in its own
	subplot.  The commented-out code throughout is left over from earlier
	experiments (per the original author's note).

	Usage: script.py [stock_name] [start_date] [end_date]
	"""
	if len(sys.argv) < 4:
		print "Usage: %s [stock_name] [start_date] [end_date]" % (sys.argv[0])
		sys.exit()

	stock_name = sys.argv[1]
	start_date = str2date(sys.argv[2])
	end_date   = str2date(sys.argv[3])
	# print "%s %s %s" % (stock_name, start_date, end_date)

	# Fetch OHLC quotes; bail out if the date range yields no data.
	stock_data = quotes_historical_yahoo_ohlc(stock_name, start_date, end_date)
	if len(stock_data) == 0:
		raise SystemExit
	# print stock_data
	print len(stock_data)
	# Locate local maxima/minima of the series, then the critical points
	# used to build the trend lines.
	local_max_point = list()
	local_min_point = list()
	local_max_point, local_min_point = find_bound(stock_data)
	print len(local_min_point)
	crit = find_crit(local_max_point, local_min_point)
	print "%d max points, %d min points" % (len(local_max_point), len(local_min_point))
	high_lines = list()
	low_lines = list()
	markx = list()
	marky = list()
	# for i in range(len(local_min_point)-1):
	# 	for j in range(i+1, len(local_min_point)):
	# 		phigh_line, plow_line , predicted_high, predicted_slope, x, y = predict_tunnel(local_max_point, [(local_min_point[i][0], local_min_point[i][4]), (local_min_point[j][0], local_min_point[j][4])], stock_data[0][0], stock_data[-1][0])
	# 		if phigh_line == 0 and plow_line == 0 and predicted_high == 0:
	# 			print "predict tunnel not found at %d %d" % (i , j)
	# 		else:
	# 			high_lines.append(phigh_line)
	# 			low_lines.append(plow_line)
	# 			markx.append(x)
	# 			marky.append(y)
	# Compute the candidate tunnels (pairs of Line2D objects) plus the
	# marker coordinates to highlight on each subplot.
	lines, x, y = predict_tunnel_2u(stock_data, local_min_point, local_max_point, stock_data[0][0], stock_data[-1][0])
	print "%d tunnels" % (len(lines))
	# high_line, high_slope = get_line(crit[0], crit[1], stock_data[0][0], stock_data[-1][0], 'b')
	# Line2D(xdata=(crit[0][0], crit[1][0]), ydata=(crit[0][1], crit[1][1]), color='k', linewidth=1.0, antialiased=True)
	# low_line , low_slope  = get_line(crit[2], crit[3], stock_data[0][0], stock_data[-1][0], 'b')
	# Line2D(xdata=(crit[2][0], crit[3][0]), ydata=(crit[2][1], crit[3][1]), color='k', linewidth=1.0, antialiased=True)
	# print "Predicted high: %lf\nPredicted tunnel slope: %lf" % (predicted_high, predicted_slope)
	# prediction
	# print stock_name + " " + prediction(crit, high_slope, low_slope, end_date)
	print x
	print y




	


	

#------------------------------------------------------plot-------------------------------------------------------------#
	# (Translation of the note below:) This plotting section is fairly
	# important matplotlib usage -- date-labelled x-axes, multiple subplots
	# in one window, and adding points/lines onto a plot; mostly adapted
	# from the matplotlib documentation.
	"""這個部分算是matplotlib中滿重要的 包括怎麼用日期的方式標示x軸 然後在一個視窗畫多個圖 以及將點線加入要畫的圖上面主要是直接參考document上面然後再加以修改的"""	
	mondays = WeekdayLocator(MONDAY)        # major ticks on the mondays
	alldays = DayLocator()              # minor ticks on the days
	weekFormatter = DateFormatter('%b %d')  # e.g., Jan 12
	dayFormatter = DateFormatter('%d')      # e.g., 12
	fig = plt.figure() 
	for i in range(1, len(lines)+1): # loop is only needed when drawing several subplots in one window
		ax = plt.subplot(2, len(lines)/2 +1, i) # grid of 2 rows x (len(lines)/2 + 1) cols; draw the i-th subplot
		fig.subplots_adjust(bottom=0.2)
		ax.xaxis.set_major_locator(mondays)
		ax.xaxis.set_minor_locator(alldays)
		ax.xaxis.set_major_formatter(weekFormatter)
	#ax.xaxis.set_minor_formatter(dayFormatter)
	#plot_day_summary(ax, quotes, ticksize=3)
		candlestick_ohlc(ax, stock_data, width=0.7)
	# ax.add_line(high_line)
	# ax.add_line(low_line)
		# Draw the tunnel's upper and lower lines, then mark the points.
		ax.add_line(lines[i-1][0])
		ax.add_line(lines[i-1][1])
		plt.plot(x[i-1], y[i-1], 'bo')
		ax.xaxis_date()
		ax.autoscale_view()
	plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')
	plt.show()
Ejemplo n.º 4
0
def alert_smtp(alert, metric, context):
    """
    Called by :func:`~trigger_alert` and sends an alert via smtp to the
    recipients that are configured for the metric.

    """
    LOCAL_DEBUG = False
    logger = logging.getLogger(skyline_app_logger)
    if settings.ENABLE_DEBUG or LOCAL_DEBUG:
        logger.info('debug :: alert_smtp - sending smtp alert')
        logger.info('debug :: alert_smtp - Memory usage at start: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)

    # FULL_DURATION to hours so that analyzer surfaces the relevant timeseries data
    # in the graph
    full_duration_in_hours = int(settings.FULL_DURATION) / 3600

    # @added 20161229 - Feature #1830: Ionosphere alerts
    # Added Ionosphere variables
    base_name = str(metric[1]).replace(settings.FULL_NAMESPACE, '', 1)
    if settings.IONOSPHERE_ENABLED:
        timeseries_dir = base_name.replace('.', '/')
        training_data_dir = '%s/%s/%s' % (
            settings.IONOSPHERE_DATA_FOLDER, str(int(metric[2])),
            timeseries_dir)
        graphite_image_file = '%s/%s.%s.graphite.%sh.png' % (
            training_data_dir, base_name, skyline_app,
            str(int(full_duration_in_hours)))
        json_file = '%s/%s.%s.redis.%sh.json' % (
            training_data_dir, base_name, skyline_app,
            str(int(full_duration_in_hours)))
        training_data_redis_image = '%s/%s.%s.redis.plot.%sh.png' % (
            training_data_dir, base_name, skyline_app,
            str(int(full_duration_in_hours)))

    # For backwards compatibility
    if '@' in alert[1]:
        sender = settings.ALERT_SENDER
        recipient = alert[1]
    else:
        sender = settings.SMTP_OPTS['sender']
        # @modified 20160806 - Added default_recipient
        try:
            recipients = settings.SMTP_OPTS['recipients'][alert[0]]
            use_default_recipient = False
        except:
            use_default_recipient = True
        if use_default_recipient:
            try:
                recipients = settings.SMTP_OPTS['default_recipient']
                logger.info(
                    'alert_smtp - using default_recipient as no recipients are configured for %s' %
                    str(alert[0]))
            except:
                logger.error(
                    'error :: alert_smtp - no known recipient for %s' %
                    str(alert[0]))
                return False

    # Backwards compatibility
    if type(recipients) is str:
        recipients = [recipients]

    # @modified 20161229 - Feature #1830: Ionosphere alerts
    # Ionosphere alerts
    unencoded_graph_title = 'Skyline %s - ALERT at %s hours - %s' % (
        context, str(int(full_duration_in_hours)), str(metric[0]))
    # @modified 20170603 - Feature #2034: analyse_derivatives
    # Added deriative functions to convert the values of metrics strictly
    # increasing monotonically to their deriative products in alert graphs and
    # specify it in the graph_title
    known_derivative_metric = False
    try:
        REDIS_ALERTER_CONN = redis.StrictRedis(unix_socket_path=settings.REDIS_SOCKET_PATH)
    except:
        logger.error('error :: alert_smtp - redis connection failed')
    try:
        derivative_metrics = list(REDIS_ALERTER_CONN.smembers('derivative_metrics'))
    except:
        derivative_metrics = []
    redis_metric_name = '%s%s' % (settings.FULL_NAMESPACE, str(base_name))
    if redis_metric_name in derivative_metrics:
        known_derivative_metric = True
    if known_derivative_metric:
        try:
            non_derivative_monotonic_metrics = settings.NON_DERIVATIVE_MONOTONIC_METRICS
        except:
            non_derivative_monotonic_metrics = []
        skip_derivative = in_list(redis_metric_name, non_derivative_monotonic_metrics)
        if skip_derivative:
            known_derivative_metric = False
    if known_derivative_metric:
        unencoded_graph_title = 'Skyline %s - ALERT at %s hours - derivative graph - %s' % (
            context, str(int(full_duration_in_hours)), str(metric[0]))

    if settings.ENABLE_DEBUG or LOCAL_DEBUG:
        logger.info('debug :: alert_smtp - unencoded_graph_title: %s' % unencoded_graph_title)
    graph_title_string = quote(unencoded_graph_title, safe='')
    graph_title = '&title=%s' % graph_title_string

    graphite_port = '80'
    if settings.GRAPHITE_PORT != '':
        graphite_port = str(settings.GRAPHITE_PORT)

    link = '%s://%s:%s/render/?from=-%shours&target=cactiStyle(%s)%s%s&colorList=orange' % (
        settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
        graphite_port, str(int(full_duration_in_hours)), metric[1],
        settings.GRAPHITE_GRAPH_SETTINGS, graph_title)
    # @added 20170603 - Feature #2034: analyse_derivatives
    if known_derivative_metric:
        link = '%s://%s:%s/render/?from=-%shours&target=cactiStyle(nonNegativeDerivative(%s))%s%s&colorList=orange' % (
            settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
            graphite_port, str(int(full_duration_in_hours)), metric[1],
            settings.GRAPHITE_GRAPH_SETTINGS, graph_title)

    content_id = metric[1]
    image_data = None
    if settings.SMTP_OPTS.get('embed-images'):
        # @added 20161229 - Feature #1830: Ionosphere alerts
        # Use existing data if files exist
        if os.path.isfile(graphite_image_file):
            try:
                with open(graphite_image_file, 'r') as f:
                    image_data = f.read()
                logger.info('alert_smtp - using existing png - %s' % graphite_image_file)
            except:
                logger.error(traceback.format_exc())
                logger.error('error :: alert_smtp - failed to read image data from existing png - %s' % graphite_image_file)
                logger.error('error :: alert_smtp - %s' % str(link))
                image_data = None

        if image_data is None:
            try:
                # @modified 20170913 - Task #2160: Test skyline with bandit
                # Added nosec to exclude from bandit tests
                image_data = urllib2.urlopen(link).read()  # nosec
                if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                    logger.info('debug :: alert_smtp - image data OK')
            except urllib2.URLError:
                logger.error(traceback.format_exc())
                logger.error('error :: alert_smtp - failed to get image graph')
                logger.error('error :: alert_smtp - %s' % str(link))
                image_data = None
                if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                    logger.info('debug :: alert_smtp - image data None')

    if LOCAL_DEBUG:
        logger.info('debug :: alert_smtp - Memory usage after image_data: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)

    # If we failed to get the image or if it was explicitly disabled,
    # use the image URL instead of the content.
    if image_data is None:
        img_tag = '<img src="%s"/>' % link
    else:
        img_tag = '<img src="cid:%s"/>' % content_id
        if settings.ENABLE_DEBUG or LOCAL_DEBUG:
            logger.info('debug :: alert_smtp - img_tag: %s' % img_tag)

        if settings.IONOSPHERE_ENABLED:
            # Create Ionosphere Graphite image
            # @modified 20161229 - Feature #1830: Ionosphere alerts
            # Only write the data to the file if it does not exist
            if not os.path.isfile(graphite_image_file):
                try:
                    write_data_to_file(skyline_app, graphite_image_file, 'w', image_data)
                    logger.info(
                        'added %s Ionosphere Graphite image :: %s' % (
                            skyline_app, graphite_image_file))
                except:
                    logger.info(traceback.format_exc())
                    logger.error(
                        'error :: failed to add %s Ionosphere Graphite image' % (
                            skyline_app, graphite_image_file))
            else:
                logger.info(
                    '%s Ionosphere Graphite image already exists :: %s' % (
                        skyline_app, graphite_image_file))

    redis_image_data = None
    try:
        plot_redis_data = settings.PLOT_REDIS_DATA
    except:
        plot_redis_data = False

    if settings.SMTP_OPTS.get('embed-images') and plot_redis_data:
        # Create graph from Redis data
        redis_metric_key = '%s%s' % (settings.FULL_NAMESPACE, metric[1])
        try:
            raw_series = REDIS_ALERTER_CONN.get(redis_metric_key)
            if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                logger.info('debug :: alert_smtp - raw_series: %s' % 'OK')
        except:
            if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                logger.info('debug :: alert_smtp - raw_series: %s' % 'FAIL')

        try:
            if LOCAL_DEBUG:
                logger.info('debug :: alert_smtp - Memory usage before get Redis timeseries data: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
            unpacker = Unpacker(use_list=True)
            unpacker.feed(raw_series)
            timeseries_x = [float(item[0]) for item in unpacker]
            unpacker = Unpacker(use_list=True)
            unpacker.feed(raw_series)
            timeseries_y = [item[1] for item in unpacker]

            unpacker = Unpacker(use_list=False)
            unpacker.feed(raw_series)
            timeseries = list(unpacker)
            if LOCAL_DEBUG:
                logger.info('debug :: alert_smtp - Memory usage after get Redis timeseries data: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
        except:
            logger.error('error :: alert_smtp - unpack timeseries failed')
            timeseries = None

        if settings.IONOSPHERE_ENABLED and timeseries:
            '''
            .. todo: this is possibly to be used to allow the user to submit the
                FULL_DURATION duration data set for the features profile to be
                created against IF it is a Mirage metric.  This would allow for
                additional granularity in Mirage metrics, thereby maintaining
                their seasonality, but allow user and Skyline to analyze the
                anomaly at a FULL_DURATION resolution as well.  Not sure how to
                code that in Ionosphere context yet but could just be additonal
                flag in the Ionosphere record.  In the Ionosphere frontend, the
                user would be given an option to either create the features
                profile on the Mirage timeseries or the redis FULL_DURATION
                timeseries.  It is a little complicated, but doable.
                # @modified 20161229 - Feature #1828: ionosphere - mirage Redis data features
                However that ^^ is UNDESIRABLE in the Mirage/Ionosphere context
                at the moment.  Ionosphere must only profile SECOND_ORDER_RESOLUTION_HOURS
                currently so as to not pollute the seasonality aspect of Mirage
            '''
            # Create Ionosphere redis timeseries json if is does not exist
            # @modified 20161229 - Feature #1830: Ionosphere alerts
            # Only write the data to the file if it does not exist and replace
            # the timeseries object if a json file exists

            # @added 20170920 - Bug #2168: Strange Redis derivative graph
            using_original_redis_json = False

            if not os.path.isfile(json_file):
                timeseries_json = str(timeseries).replace('[', '(').replace(']', ')')
                try:
                    write_data_to_file(skyline_app, json_file, 'w', timeseries_json)
                    logger.info('added %s Ionosphere Redis data timeseries json file :: %s' % (skyline_app, json_file))
                except:
                    logger.info(traceback.format_exc())
                    logger.error('error :: failed to add %s Ionosphere Redis data timeseries json file' % (skyline_app, json_file))
            else:
                # Replace the timeseries object
                logger.info('%s Ionosphere Redis data timeseries json file already exists, using :: %s' % (skyline_app, json_file))
                anomaly_json = json_file
                try:
                    # Read the timeseries json file
                    with open(anomaly_json, 'r') as f:
                        raw_timeseries = f.read()
                    timeseries_array_str = str(raw_timeseries).replace('(', '[').replace(')', ']')
                    timeseries = literal_eval(timeseries_array_str)
                    logger.info('%s Redis timeseries replaced with timeseries from :: %s' % (skyline_app, anomaly_json))
                    timeseries_x = [float(item[0]) for item in timeseries]
                    timeseries_y = [item[1] for item in timeseries]
                    # @added 20170920 - Bug #2168: Strange Redis derivative graph
                    # This already has nonNegativeDerivative applied to it
                    using_original_redis_json = True
                except:
                    logger.error(traceback.format_exc())
                    logger.error(
                        'error :: %s failed to read timeseries data from %s' % (skyline_app, anomaly_json))
                    timeseries = None

        # @added 20170603 - Feature #2034: analyse_derivatives
        if known_derivative_metric:

            # @added 20170920 - Bug #2168: Strange Redis derivative graph
            # If this is the Mirage Redis json it already has
            # nonNegativeDerivative applied to it
            if not using_original_redis_json:
                logger.info('alert_smtp - nonNegativeDerivative being applied')

                try:
                    derivative_timeseries = nonNegativeDerivative(timeseries)
                    timeseries = derivative_timeseries
                    # @added 20170920 - Bug #2168: Strange Redis derivative graph
                    logger.info('alert_smtp - nonNegativeDerivative applied')
                except:
                    logger.error('error :: alert_smtp - nonNegativeDerivative failed')
            else:
                logger.info('alert_smtp - nonNegativeDerivative not being applied, as it will have been applied in the original json')

        # @added 21070726 - Bug #2068: Analyzer smtp alert error on Redis plot with derivative metrics
        # If the nonNegativeDerivative has been calculated we need to reset the
        # x and y as nonNegativeDerivative has to discard the first value as it
        # has no delta for it so the timeseries is 1 item less.
        timeseries_x = [float(item[0]) for item in timeseries]
        timeseries_y = [item[1] for item in timeseries]

        pd_series_values = None
        if timeseries:
            try:
                if LOCAL_DEBUG:
                    logger.info('debug :: alert_smtp - Memory usage before pd.Series: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
                values = pd.Series([x[1] for x in timeseries])
                # Because the truth value of a Series is ambiguous
                pd_series_values = True
                if LOCAL_DEBUG:
                    logger.info('debug :: alert_smtp - Memory usage after pd.Series: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
            except:
                logger.error('error :: alert_smtp - pandas value series on timeseries failed')

        if pd_series_values:
            try:
                array_median = np.median(values)
                if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                    logger.info('debug :: alert_smtp - values median: %s' % str(array_median))

                array_amax = np.amax(values)
                if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                    logger.info('debug :: alert_smtp - array_amax: %s' % str(array_amax))
                array_amin = np.amin(values)
                if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                    logger.info('debug :: alert_smtp - array_amin: %s' % str(array_amin))
                mean = values.mean()
                if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                    logger.info('debug :: alert_smtp - mean: %s' % str(mean))
                stdDev = values.std()
                if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                    logger.info('debug :: alert_smtp - stdDev: %s' % str(stdDev))

                sigma3 = 3 * stdDev
                if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                    logger.info('debug :: alert_smtp - sigma3: %s' % str(sigma3))

                # sigma3_series = [sigma3] * len(values)

                sigma3_upper_bound = mean + sigma3
                try:
                    sigma3_lower_bound = mean - sigma3
                except:
                    sigma3_lower_bound = 0

                sigma3_upper_series = [sigma3_upper_bound] * len(values)
                sigma3_lower_series = [sigma3_lower_bound] * len(values)
                amax_series = [array_amax] * len(values)
                amin_series = [array_amin] * len(values)
                mean_series = [mean] * len(values)
            except:
                logger.error('error :: alert_smtp - numpy ops on series failed')
                mean_series = None

        if mean_series:
            graph_title = 'Skyline %s - ALERT - at %s hours - Redis data\n%s - anomalous value: %s' % (context, str(int(full_duration_in_hours)), metric[1], str(metric[0]))
            # @added 20170603 - Feature #2034: analyse_derivatives
            if known_derivative_metric:
                graph_title = 'Skyline %s - ALERT - at %s hours - Redis data (derivative graph)\n%s - anomalous value: %s' % (context, str(int(full_duration_in_hours)), metric[1], str(metric[0]))

            # @modified 20160814 - Bug #1558: Memory leak in Analyzer
            # I think the buf is causing a memory leak, trying a file
            # if python_version == 3:
            #     buf = io.StringIO()
            # else:
            #     buf = io.BytesIO()
            buf = '%s/%s.%s.%s.png' % (
                settings.SKYLINE_TMP_DIR, skyline_app, str(int(metric[2])), metric[1])

            if LOCAL_DEBUG:
                logger.info('debug :: alert_smtp - Memory usage before plot Redis data: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)

            # Too big
            # rcParams['figure.figsize'] = 12, 6
            rcParams['figure.figsize'] = 8, 4
            try:
                # fig = plt.figure()
                fig = plt.figure(frameon=False)
                ax = fig.add_subplot(111)
                ax.set_title(graph_title, fontsize='small')
                # @modified 20180417 - Bug #2358: set_axis_bgcolor method removed from Matplotlib - Luminosity
                #                      IssueID #49 'AxesSubplot' object has no attribute 'set_axis_bgcolor'
                # ax.set_axis_bgcolor('black')
                if hasattr(ax, 'set_facecolor'):
                    ax.set_facecolor('black')
                else:
                    ax.set_axis_bgcolor('black')

                try:
                    datetimes = [dt.datetime.utcfromtimestamp(ts) for ts in timeseries_x]
                    if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                        logger.info('debug :: alert_smtp - datetimes: %s' % 'OK')
                except:
                    logger.error('error :: alert_smtp - datetimes: %s' % 'FAIL')

                plt.xticks(rotation=0, horizontalalignment='center')
                xfmt = DateFormatter('%a %H:%M')
                plt.gca().xaxis.set_major_formatter(xfmt)

                ax.xaxis.set_major_formatter(xfmt)

                ax.plot(datetimes, timeseries_y, color='orange', lw=0.6, zorder=3)
                ax.tick_params(axis='both', labelsize='xx-small')

                max_value_label = 'max - %s' % str(array_amax)
                ax.plot(datetimes, amax_series, lw=1, label=max_value_label, color='m', ls='--', zorder=4)
                min_value_label = 'min - %s' % str(array_amin)
                ax.plot(datetimes, amin_series, lw=1, label=min_value_label, color='b', ls='--', zorder=4)
                mean_value_label = 'mean - %s' % str(mean)
                ax.plot(datetimes, mean_series, lw=1.5, label=mean_value_label, color='g', ls='--', zorder=4)

                sigma3_text = (r'3$\sigma$')
                # sigma3_label = '%s - %s' % (str(sigma3_text), str(sigma3))

                sigma3_upper_label = '%s upper - %s' % (str(sigma3_text), str(sigma3_upper_bound))
                ax.plot(datetimes, sigma3_upper_series, lw=1, label=sigma3_upper_label, color='r', ls='solid', zorder=4)

                if sigma3_lower_bound > 0:
                    sigma3_lower_label = '%s lower - %s' % (str(sigma3_text), str(sigma3_lower_bound))
                    ax.plot(datetimes, sigma3_lower_series, lw=1, label=sigma3_lower_label, color='r', ls='solid', zorder=4)

                ax.get_yaxis().get_major_formatter().set_useOffset(False)
                ax.get_yaxis().get_major_formatter().set_scientific(False)

                # Shrink current axis's height by 10% on the bottom
                box = ax.get_position()
                ax.set_position([box.x0, box.y0 + box.height * 0.1,
                                 box.width, box.height * 0.9])

                # Put a legend below current axis
                ax.legend(loc='upper center', bbox_to_anchor=(0.5, -0.05),
                          fancybox=True, shadow=True, ncol=4, fontsize='x-small')
                plt.rc('lines', lw=2, color='w')

                plt.grid(True)

                ax.grid(b=True, which='both', axis='both', color='lightgray',
                        linestyle='solid', alpha=0.5, linewidth=0.6)
                # @modified 20180417 - Bug #2358: set_axis_bgcolor method removed from Matplotlib - Luminosity
                #                      IssueID #49 'AxesSubplot' object has no attribute 'set_axis_bgcolor'
                # ax.set_axis_bgcolor('black')
                if hasattr(ax, 'set_facecolor'):
                    ax.set_facecolor('black')
                else:
                    ax.set_axis_bgcolor('black')

                rcParams['xtick.direction'] = 'out'
                rcParams['ytick.direction'] = 'out'
                ax.margins(y=.02, x=.03)
                # tight_layout removes the legend box
                # fig.tight_layout()
                try:
                    if LOCAL_DEBUG:
                        logger.info('debug :: alert_smtp - Memory usage before plt.savefig: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
                    plt.savefig(buf, format='png')

                    if settings.IONOSPHERE_ENABLED:
                        if not os.path.exists(training_data_dir):
                            mkdir_p(training_data_dir)
                            logger.info('created dir - %s' % training_data_dir)

                        if not os.path.isfile(training_data_redis_image):
                            try:
                                plt.savefig(training_data_redis_image, format='png')
                                logger.info(
                                    'alert_smtp - save Redis training data image - %s' % (
                                        training_data_redis_image))
                            except:
                                logger.info(traceback.format_exc())
                                logger.error(
                                    'error :: alert_smtp - could not save - %s' % (
                                        training_data_redis_image))
                        else:
                            logger.info(
                                'alert_smtp - Redis training data image already exists - %s' % (
                                    training_data_redis_image))

                    # @added 20160814 - Bug #1558: Memory leak in Analyzer
                    # As per http://www.mail-archive.com/[email protected]/msg13222.html
                    # savefig in the parent process was causing the memory leak
                    # the below fig.clf() and plt.close() did not resolve this
                    # however spawing a multiprocessing process for alert_smtp
                    # does solve this as issue as all memory is freed when the
                    # process terminates.
                    fig.clf()
                    plt.close(fig)
                    redis_graph_content_id = 'redis.%s' % metric[1]
                    redis_image_data = True
                    if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                        logger.info('debug :: alert_smtp - savefig: %s' % 'OK')
                        logger.info('debug :: alert_smtp - Memory usage after plt.savefig: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
                except:
                    logger.info(traceback.format_exc())
                    logger.error('error :: alert_smtp - plt.savefig: %s' % 'FAIL')
            except:
                logger.error(traceback.format_exc())
                logger.error('error :: alert_smtp - could not build plot')

    if LOCAL_DEBUG:
        logger.info('debug :: alert_smtp - Memory usage before email: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)

    if redis_image_data:
        redis_img_tag = '<img src="cid:%s"/>' % redis_graph_content_id
        if settings.ENABLE_DEBUG or LOCAL_DEBUG:
            logger.info('debug :: alert_smtp - redis_img_tag: %s' % str(redis_img_tag))
    else:
        # @modified 20161229 - Feature #1830: Ionosphere alerts
        # @modified 20170108 - Feature #1852: Ionosphere - features_profile matched graphite graphs
        # Restored the previous redis_img_tag method as some smtp alerts were
        # coming without a Redis graph, not all but some and for some reason,
        # I am pretty certain retrospectively that it was done that way from
        # testing I just wanted to try and be cleaner.
        # The redis_img_tag was changed at
        # https://github.com/earthgecko/skyline/commit/31bcacf3f90f0953ebed0d57260cb937e01f887c#diff-520bf2a218f65074ffead4d8184c138dR489
        redis_img_tag = '<img src="%s"/>' % 'none'
        # redis_img_tag = '<img src="none"/>'

    # @added 20170806 - Feature #1830: Ionosphere alerts
    # Show a human date in alerts
    alerted_at = str(dt.datetime.utcfromtimestamp(int(metric[2])))

    try:
        body = '<h3><font color="#dd3023">Sky</font><font color="#6698FF">line</font><font color="black"> %s alert</font></h3><br>' % context
        body += '<font color="black">metric: <b>%s</b></font><br>' % metric[1]
        body += '<font color="black">Anomalous value: %s</font><br>' % str(metric[0])
        body += '<font color="black">Anomaly timestamp: %s</font><br>' % str(int(metric[2]))
        # @added 20170806 - Feature #1830: Ionosphere alerts
        # Show a human date in alerts
        body += '<font color="black">Anomalous at: %s</font><br>' % alerted_at
        body += '<font color="black">At hours: %s</font><br>' % str(int(full_duration_in_hours))
        body += '<font color="black">Next alert in: %s seconds</font><br>' % str(alert[2])
        # @added 20170603 - Feature #2034: analyse_derivatives
        if known_derivative_metric:
            body += '<font color="black">Derivative graph: True</font><br>'

        more_body = ''
        if settings.IONOSPHERE_ENABLED:
            # @modified 20170823 - Bug #2142: 7bit SMTP encoding breaking long urls
            # Broke body into body and more_body to workaround the 990 character
            # limit per line for SMTP
            more_body += '<h3><font color="#dd3023">Ionosphere :: </font><font color="#6698FF">training data</font><font color="black"></font></h3>'
            ionosphere_link = '%s/ionosphere?timestamp=%s&metric=%s' % (
                settings.SKYLINE_URL, str(int(metric[2])), str(metric[1]))
            more_body += '<font color="black">To use this timeseries to train Skyline that this is not anomalous manage this training data at:<br>'
            more_body += '<a href="%s">%s</a></font>' % (ionosphere_link, ionosphere_link)
        if redis_image_data:
            more_body += '<font color="black">min: %s  | max: %s   | mean: %s <br>' % (
                str(array_amin), str(array_amax), str(mean))
            more_body += '3-sigma: %s <br>' % str(sigma3)
            more_body += '3-sigma upper bound: %s   | 3-sigma lower bound: %s <br></font>' % (
                str(sigma3_upper_bound), str(sigma3_lower_bound))
            more_body += '<h3><font color="black">Redis data at FULL_DURATION</font></h3><br>'
            more_body += '<div dir="ltr">:%s<br></div>' % redis_img_tag
        if image_data:
            more_body += '<h3><font color="black">Graphite data at FULL_DURATION (may be aggregated)</font></h3>'
            more_body += '<div dir="ltr"><a href="%s">%s</a><br></div><br>' % (link, img_tag)
            more_body += '<font color="black">Clicking on the above graph will open to the Graphite graph with current data</font><br>'
        if redis_image_data:
            more_body += '<font color="black">To disable the Redis data graph view, set PLOT_REDIS_DATA to False in your settings.py, if the Graphite graph is sufficient for you,<br>'
            more_body += 'however do note that will remove the 3-sigma and mean value too.</font>'
        more_body += '<br>'
        more_body += '<div dir="ltr" align="right"><font color="#dd3023">Sky</font><font color="#6698FF">line</font><font color="black"> version :: %s</font></div><br>' % str(skyline_version)
    except:
        logger.error('error :: alert_smtp - could not build body')
        logger.info(traceback.format_exc())

    for recipient in recipients:
        try:
            # @modified 20170823 - Bug #2142: 7bit SMTP encoding breaking long urls
            # Broke body into body and more_body to workaround the 990 character
            # limit per line for SMTP, using mixed as alternative indicates that
            # the client should select one of the parts for display and ignore
            # the rest (tripleee - https://stackoverflow.com/a/35115938)
            # msg = MIMEMultipart('alternative')
            msg = MIMEMultipart('mixed')

            # @added 20170812 - Bug #2142: 7bit SMTP encoding breaking long urls
            # set email charset and email encodings
            cs_ = charset.Charset('utf-8')
            cs_.header_encoding = charset.QP
            cs_.body_encoding = charset.QP
            msg.set_charset(cs_)

            msg['Subject'] = '[Skyline alert] - %s ALERT - %s' % (context, metric[1])
            msg['From'] = sender
            msg['To'] = recipient

            msg.attach(MIMEText(body, 'html'))
            # @added 20170823 - Bug #2142: 7bit SMTP encoding breaking long urls
            # Broke body into body and more_body to workaround the 990 character
            # limit per line for SMTP
            msg.replace_header('content-transfer-encoding', 'quoted-printable')
            msg.attach(MIMEText(more_body, 'html'))

            if redis_image_data:
                try:
                    # @modified 20160814 - Bug #1558: Memory leak in Analyzer
                    # I think the buf is causing a memory leak, trying a file
                    # buf.seek(0)
                    # msg_plot_attachment = MIMEImage(buf.read())
                    # msg_plot_attachment = MIMEImage(buf.read())
                    try:
                        with open(buf, 'r') as f:
                            plot_image_data = f.read()
                        try:
                            os.remove(buf)
                        except OSError:
                            logger.error(
                                'error :: alert_smtp - failed to remove file - %s' % buf)
                            logger.info(traceback.format_exc())
                            pass
                    except:
                        logger.error('error :: failed to read plot file - %s' % buf)
                        plot_image_data = None

                    # @added 20161124 - Branch #922: ionosphere
                    msg_plot_attachment = MIMEImage(plot_image_data)
                    msg_plot_attachment.add_header('Content-ID', '<%s>' % redis_graph_content_id)
                    msg.attach(msg_plot_attachment)
                    if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                        logger.info('debug :: alert_smtp - msg_plot_attachment - redis data done')
                except:
                    logger.error('error :: alert_smtp - msg_plot_attachment')
                    logger.info(traceback.format_exc())

            if image_data is not None:
                try:
                    msg_attachment = MIMEImage(image_data)
                    msg_attachment.add_header('Content-ID', '<%s>' % content_id)
                    msg.attach(msg_attachment)
                    if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                        logger.info('debug :: alert_smtp - msg_attachment - Graphite img source done')
                except:
                    logger.error('error :: alert_smtp - msg_attachment')
                    logger.info(traceback.format_exc())
        except:
            logger.error('error :: alert_smtp - could not attach')
            logger.info(traceback.format_exc())

        s = SMTP('127.0.0.1')
        try:
            s.sendmail(sender, recipient, msg.as_string())
            if settings.ENABLE_DEBUG or LOCAL_DEBUG:
                logger.info('debug :: alert_smtp - message sent to %s OK' % str(recipient))
        except:
            logger.error('error :: alert_smtp - could not send email to %s' % str(recipient))
            logger.info(traceback.format_exc())

        s.quit()

        if LOCAL_DEBUG:
            logger.info('debug :: alert_smtp - Memory usage after email: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)

        if redis_image_data:
            # buf.seek(0)
            # buf.write('none')
            if LOCAL_DEBUG:
                logger.info('debug :: alert_smtp - Memory usage before del redis_image_data objects: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
            del raw_series
            del unpacker
            del timeseries[:]
            del timeseries_x[:]
            del timeseries_y[:]
            del values
            del datetimes[:]
            del msg_plot_attachment
            del redis_image_data
            # We del all variables that are floats as they become unique objects and
            # can result in what appears to be a memory leak, but is not, it is
            # just the way Python handles floats
            del mean
            del array_amin
            del array_amax
            del stdDev
            del sigma3
            if LOCAL_DEBUG:
                logger.info('debug :: alert_smtp - Memory usage after del redis_image_data objects: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
            if LOCAL_DEBUG:
                logger.info('debug :: alert_smtp - Memory usage before del fig object: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
            # @added 20160814 - Bug #1558: Memory leak in Analyzer
            #                   Issue #21 Memory leak in Analyzer - https://github.com/earthgecko/skyline/issues/21
            # As per http://www.mail-archive.com/[email protected]/msg13222.html
            fig.clf()
            plt.close(fig)
            del fig
            if LOCAL_DEBUG:
                logger.info('debug :: alert_smtp - Memory usage after del fig object: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)

        if LOCAL_DEBUG:
            logger.info('debug :: alert_smtp - Memory usage before del other objects: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
        del recipients[:]
        del body
        del msg
        del image_data
        del msg_attachment
        if LOCAL_DEBUG:
            logger.info('debug :: alert_smtp - Memory usage after del other objects: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
        return
Ejemplo n.º 5
0
    def plot_frost_depth(self,
                         sizer,
                         styler,
                         section_name,
                         save_name,
                         frost_front,
                         thaw_front,
                         interface_list,
                         layer_names,
                         title=False,
                         twinx=True,
                         save_fig=True,
                         show_fig=False):
        """Plot frost-front and thaw-front depth over time for one section.

        Draws the frost/thaw front depth curves from ``self.dfs.FDdata``,
        the soil-layer interfaces with their labels, shaded spans for
        missing data, and (optionally) the air temperature on a twin
        y-axis from ``self.dfs.ATdata``.

        Parameters
        ----------
        sizer : dict
            Size settings: 'figsize', 'text_size', 'tick_width',
            'tick_size', 'tick_length', 'tick_rotation', 'label_size',
            'legend_size', 'legend_length', 'aidline_width', 'spine_width'.
        styler : dict
            Colour/style settings; ``styler['name'] == 'light'`` selects
            the light palette, anything else the dark palette.
        section_name : str
            Axes title text (only used when ``title`` is True).
        save_name : str
            File name of the saved figure (written under ``out\\``).
        frost_front, thaw_front : str
            Column names in ``self.dfs.FDdata`` holding the front depths.
        interface_list : sequence of float
            Depths (m) of the soil-layer interfaces, top to bottom.
        layer_names : sequence
            Per-layer label lines; ``layer_names[i][0]`` (and ``[1]`` for
            layers thicker than 0.11 m) are drawn inside layer ``i``.
        title, twinx, save_fig, show_fig : bool
            Toggle the title, the air-temperature twin axis, saving to
            disk, and interactive display, respectively.
        """

        # local styler and sizer:
        if styler['name'] == 'light':
            color1 = 'cornflowerblue'
            color2 = 'lightcoral'
            color3 = 'black'  # air temperature line
            color4 = '#dddddd'
            color5 = 'cornflowerblue'  # fill between air temperature
            plotline_width = 1
        else:
            color1 = '#3598FE'
            color2 = '#F29724'
            color3 = '#2B6A6C'  # air temperature line
            color4 = '#404040'
            color5 = '#2B6A6C'  # fill between air temperature
            plotline_width = 1

        # initializer:
        fig, ax = plt.subplots(figsize=sizer['figsize'])
        fig.patch.set_facecolor(styler['facecolor'])
        ax.set_facecolor(styler['axis_color'])
        plt.tight_layout()

        # Build contiguous [start, end] date periods of the frost front for
        # plotting.  A period presumably starts when the front depth first
        # reads 0 and ends at a 0 reading or a NaN gap — TODO confirm.
        # the following should be placed somewhere else.. or should it?
        plot_periods = []
        flag = False
        period = []
        for date, temp in zip(self.dfs.FDdata['date'],
                              self.dfs.FDdata[frost_front]):
            if temp == 0 and flag is False:
                period.append(date)
                flag = True
            # NOTE(review): 'or' binds looser than 'and', so this condition
            # reads as `temp == 0 or (np.isnan(temp) and flag is True)` —
            # confirm whether `(temp == 0 or np.isnan(temp)) and flag` was
            # intended instead.
            elif temp == 0 or np.isnan(temp) and flag is True:
                period.append(date - pd.Timedelta(days=1))
                plot_periods.append(period)
                if temp == 0:
                    period = [date]
                else:
                    flag = False
                    period = []

        # refactoring the frost period function:
        # Collect per-period lists of (locations, dates) for both fronts,
        # treating entries in self.missing_dates as gaps that terminate a
        # period without padding it.
        frost_periods = []
        thaw_periods = []
        f_period_locs = []
        f_period_dates = []

        t_period_locs = []
        t_period_dates = []
        for date, floc, tloc in zip(self.dfs.FDdata['date'],
                                    self.dfs.FDdata[frost_front],
                                    self.dfs.FDdata[thaw_front]):

            if np.isnan(floc) and len(
                    f_period_locs) != 0 and date not in self.missing_dates:
                # This is executed when the end of frost period is reached and it is not terminated by missing date
                f_period_locs.append(f_period_locs[-1])
                f_period_dates.append(date + pd.Timedelta(hours=6))
                frost_periods.append([f_period_locs, f_period_dates])
                f_period_locs = []
                f_period_dates = []
            elif np.isnan(floc) and len(
                    f_period_locs) != 0 and date in self.missing_dates:
                # This is executed when the end of frost period is reached and it is terminated by a missing date
                frost_periods.append([f_period_locs, f_period_dates])
                f_period_locs = []
                f_period_dates = []

            else:
                # This statement is executed when floc != 0
                if len(f_period_locs) == 0 and (
                        date - pd.Timedelta(days=1)) not in self.missing_dates:
                    # This is executed when a new period starts and there has not been a missing date before.
                    # If so then on top of the current dates and floc, a zero and date-1 is added to the list.
                    f_period_locs.append(0)
                    f_period_locs.append(floc)
                    f_period_dates.append(date - pd.Timedelta(days=1))
                    f_period_dates.append(date)

                elif len(f_period_locs) == 0 and (
                        date - pd.Timedelta(days=1)) in self.missing_dates:
                    # This is executed when a new period starts, but there has been a missing dates before.
                    # In this case zero and date-1 is not added.
                    f_period_locs.append(floc)
                    f_period_dates.append(date)

                elif len(f_period_locs) != 0:
                    # This is executed when this value is not the first in the frost period.
                    f_period_locs.append(floc)
                    f_period_dates.append(date)
        # NOTE(review): leftover debug output — consider removing or routing
        # through logging.
        for period in frost_periods:
            print(period)

        # Draw each frost-front period as its own line segment so gaps stay
        # visually disconnected.
        for period in plot_periods:
            mask = (self.dfs.FDdata['date'] >=
                    period[0]) & (self.dfs.FDdata['date'] <= period[1])
            df = self.dfs.FDdata.loc[mask]

            ax.plot_date(df['date'],
                         df[frost_front],
                         marker=None,
                         color=color1,
                         linestyle='solid',
                         linewidth=plotline_width)

        # plot data:
        # Empty dummy series so 'frost front' appears exactly once in the
        # legend (the real segments above are drawn unlabeled).
        ax.plot_date([], [],
                     label='frost front',
                     marker=None,
                     color=color1,
                     linestyle='solid',
                     linewidth=plotline_width)
        ax.plot_date(self.dfs.FDdata['date'],
                     self.dfs.FDdata[thaw_front],
                     label='thaw front',
                     marker=None,
                     color=color2,
                     linestyle='solid',
                     linewidth=plotline_width)

        # Horizontal aid lines at each layer interface: dashed over the data
        # span, solid over the lead-in before start_date.
        layer_thickness_list = np.ediff1d(interface_list)
        for interface in interface_list:
            ax.plot([self.start_date, self.end_date], [interface, interface],
                    color=styler['aidline_color'],
                    linestyle='--',
                    linewidth=sizer['aidline_width'])
            ax.plot([self.zero_date, self.start_date], [interface, interface],
                    color=styler['aidline_color'],
                    linestyle='solid',
                    linewidth=sizer['aidline_width'])
        # Vertical marker at the data start.
        ax.plot([self.dfs.start_date, self.dfs.start_date], [0, 4],
                color=styler['aidline_color'],
                linestyle='solid',
                linewidth=sizer['aidline_width'])

        # Shade every span of missing observations.
        for lost_period in self.year_sample['missing_dates']:
            start = lost_period['start_date']
            end = lost_period['end_date']
            ax.axvspan(start, end, facecolor=color4)
        # Invisible (NaN) span purely to get a 'missing frost data' entry
        # into the legend.
        ax.axvspan(np.nan,
                   np.nan,
                   facecolor=color4,
                   label='missing frost data')

        # add text to plot:
        # Layer labels, vertically centred in each layer; thin layers
        # (<= 0.11 m) get a single label line, thicker ones get two.
        for i in range(len(layer_thickness_list)):
            if layer_thickness_list[i] <= 0.11:
                ax.text(
                    self.zero_date +
                    datetime.timedelta(self.dfs.days * 0.25 * 0.10),
                    (interface_list[i] + layer_thickness_list[i] / 2 + 0.025),
                    layer_names[i][0],
                    fontsize=sizer['text_size'],
                    color=styler['text_color'])
            else:
                ax.text(self.zero_date +
                        datetime.timedelta(self.dfs.days * 0.25 * 0.10),
                        (interface_list[i] + layer_thickness_list[i] / 2 +
                         0.025 - 0.05),
                        layer_names[i][0],
                        fontsize=sizer['text_size'],
                        color=styler['text_color'])
                ax.text(self.zero_date +
                        datetime.timedelta(self.dfs.days * 0.25 * 0.10),
                        (interface_list[i] + layer_thickness_list[i] / 2 +
                         0.025 + 0.05),
                        layer_names[i][1],
                        fontsize=sizer['text_size'],
                        color=styler['text_color'])
        # Label for the bottom-most layer, below the last interface.
        ax.text(self.zero_date +
                datetime.timedelta(self.dfs.days * 0.25 * 0.10),
                (interface_list[-1] + 0.2),
                layer_names[-1][0],
                fontsize=sizer['text_size'],
                color=styler['text_color'])

        if twinx:
            # Dummy entry so 'air temperature' shows in the main legend even
            # though the actual curve is drawn on the twin axis below.
            ax.plot(np.nan,
                    marker=None,
                    color=color3,
                    linestyle='solid',
                    linewidth=plotline_width,
                    label='air temperature')

        # PRE-CONFIGURATION (has to be executed before the twinx plot):
        # legend:
        legend = ax.legend(
            loc='lower left',
            bbox_to_anchor=(0.13, 0.015),
            fontsize=sizer['legend_size'],
            handlelength=sizer['legend_length'],
            #edgecolor=styler['legend_edge_color'],
            frameon=False,
            facecolor=styler['legend_face_color'],
            labelcolor=styler['legend_text_color'])
        #legend.get_frame().set_linewidth(sizer['legend_frame_width'])
        # Detach the legend here; it is re-attached below to whichever axes
        # ends up on top (twin axis or main) so it is not drawn over.
        legend.remove()

        # temperature on plots (secondary axes)
        if twinx:
            # initializer:
            ax_twinx = ax.twinx()

            # plot data:
            ax_twinx.plot_date(self.dfs.ATdata['date'],
                               self.dfs.ATdata['avg'],
                               label='air temperature',
                               marker=None,
                               color=color3,
                               linestyle='solid',
                               linewidth=plotline_width)
            # Zero-degree reference line.
            ax_twinx.plot([self.dfs.start_date, self.dfs.end_date], [0, 0],
                          linestyle='solid',
                          color=styler['aidline_color'],
                          linewidth=sizer['aidline_width'])
            # Shade where boolean_list holds (presumably sub-zero spans —
            # TODO confirm against how self.dfs.boolean_list is built).
            ax_twinx.fill_between(self.dfs.ATdata['date'],
                                  self.dfs.ATdata['avg'],
                                  where=self.dfs.boolean_list,
                                  color=color5,
                                  interpolate=True,
                                  alpha=0.7)

            # TWINX CONFIGURATION
            # legend:
            ax_twinx.add_artist(legend)

            # ticks:
            ylim_twin, yticks_twin = self.generate_yaxis_twin_ticks()
            ax_twinx.set_ylim(ylim_twin)
            ax_twinx.set_yticks(yticks_twin)
            ax_twinx.tick_params(direction='in', width=1.5)
            ax_twinx.tick_params(axis='y',
                                 direction='in',
                                 width=sizer['tick_width'],
                                 labelsize=sizer['tick_size'],
                                 length=sizer['tick_length'],
                                 colors=styler['tick_color'])
            ax.tick_params(
                axis='both',
                direction='in',
                width=sizer['tick_width'],
                labelsize=sizer['tick_size'],
                length=sizer['tick_length'],
                colors=styler['tick_color'],
                top=True,
            )

            # label:
            ax_twinx.set_ylabel('Temperature, °C',
                                size=sizer['label_size'],
                                color=styler['label_color'])
            ax_twinx.yaxis.set_label_coords(1.05, 0.22)

            # spines:
            for spine in ['top', 'bottom', 'left', 'right']:
                ax_twinx.spines[spine].set_visible(False)

        else:
            # legend:
            ax.add_artist(legend)

            # ticks:
            ax.tick_params(axis='both',
                           direction='in',
                           width=sizer['tick_width'],
                           labelsize=sizer['tick_size'],
                           length=sizer['tick_length'],
                           color=styler['tick_color'],
                           labelcolor=styler['tick_label_color'],
                           right=True,
                           top=True)

        # MAIN CONFIGURATION:
        # ticks:
        ax.set_xlim(self.dfs.zero_date, self.dfs.end_date)
        ax.set_xticks(self.dfs.date_list)
        # Depth axis is inverted (0 m at the top, 3 m at the bottom).
        ax.set_ylim([3, 0])
        ax.set_yticks([0, 0.5, 1, 1.5, 2, 2.5, 3])
        ax.xaxis.set_major_formatter(DateFormatter("%Y-%m-%d"))
        ax.yaxis.set_major_formatter(FormatStrFormatter('%.1f'))
        plt.setp(ax.get_xticklabels(),
                 rotation=sizer['tick_rotation'],
                 ha='right')

        # label:
        ax.set_ylabel('Depth, m',
                      size=sizer['label_size'],
                      color=styler['label_color'])

        # spines:
        for spine in ['top', 'bottom', 'left', 'right']:
            ax.spines[spine].set_linewidth(sizer['spine_width'])
            ax.spines[spine].set_color(styler['spine_color'])

        # title:
        if title:
            ax.set_title(section_name)

        # save show options:
        if save_fig:
            # NOTE(review): Windows-only path separator; os.path.join/pathlib
            # would be portable.
            fig.savefig('out\\' + save_name,
                        dpi=300,
                        bbox_inches='tight',
                        facecolor=fig.get_facecolor())

        if show_fig:
            plt.show()

        plt.close(fig)
Ejemplo n.º 6
0
    def plotpoi(self):
        """Plot one or two point-of-interest (POI) output variables.

        Reads ``POI_<name>.txt`` from the selected output folder and, driven
        by the dialog's combo boxes and check boxes, plots either a single
        variable against time, two variables against time (shared or twin
        y-axes depending on their units), or one variable against the other
        as a scatter plot.  Shows a Qt error box and returns early when no
        POI or no plot variable is selected.
        """
        self.varpoi1 = self.dlg.comboBox_POI.currentText()

        if self.dlg.comboBox_POI.currentText() == 'Not Specified':
            QMessageBox.critical(self.dlg, "Error", "No POI is selected")
            return

        # Load the POI result table (one header row skipped).
        data1 = np.loadtxt(self.folderPath[0] + '/POI_' + self.varpoi1 + '.txt', skiprows=1)
        # Column index in the POI file for each selectable plot variable.
        varpos = [5, 6, 9, 7, 8, 10, 11, 16, 17, 22, 23, 24, 25, 26, 27, 28, 29]
        # Axis-label unit strings (LaTeX), aligned with varpos by position.
        wm2 = '$W$'' ''$m ^{-2}$'
        degC = '$^{o}C$'
        deg = '$Degrees(^{o})$'
        frac = ''
        varunit = [deg, deg, wm2, wm2, wm2, wm2, wm2, wm2, wm2, degC, degC, frac, frac, degC, wm2, frac, frac]

        # Build matplotlib date numbers: column 0 is presumably the year and
        # column 4 a fractional day-of-year offset — TODO confirm file format.
        # NOTE(review): `dt` looks like matplotlib.dates here, making
        # dt.datetime the re-exported datetime module — verify the import.
        datenum_yy = np.zeros(data1.shape[0])
        for i in range(0, data1.shape[0]):  # making date number
            datenum_yy[i] = dt.date2num(dt.datetime.datetime(int(data1[i, 0]), 1, 1))

        dectime = datenum_yy + data1[:, 4]

        dates = dt.num2date(dectime)
        # QMessageBox.critical(self.dlg, "data", str(dates))

        if not self.dlg.checkboxUsePOI.isChecked():
            # One variable
            # NOTE(review): 'id' shadows the builtin; harmless here but worth
            # renaming.  The -1 skips the 'Not Specified' combo entry.
            id = self.dlg.comboBox_POIVariable.currentIndex() - 1

            if self.dlg.comboBox_POIVariable.currentText() == 'Not Specified':
                QMessageBox.critical(self.dlg, "Error", "No plotting variable is selected")
                return

            plt.figure(1, figsize=(15, 7), facecolor='white')
            plt.title(self.dlg.comboBox_POIVariable.currentText())
            ax1 = plt.subplot(1, 1, 1)
            ax1.plot(dates, data1[:, varpos[id]], 'r', label='$' + self.dlg.comboBox_POIVariable.currentText() + '$')
            plt.setp(plt.gca().xaxis.get_majorticklabels(),'rotation', 45)
            ax1.grid(True)

            # Multi-day span (column 1 presumably day-of-year): daily ticks;
            # otherwise hourly ticks with time-of-day labels.
            if (np.max(data1[:, 1]) - np.min(data1[:, 1])) > 1:
                ax1.xaxis.set_major_locator(DayLocator())
                ax1.xaxis.set_major_formatter(DateFormatter("%Y-%m-%d"))
            else:
                ax1.xaxis.set_minor_locator(HourLocator())
                ax1.xaxis.set_major_formatter(DateFormatter("%H:%M"))

            ax1.set_ylabel(varunit[id], fontsize=14)
            ax1.set_xlabel('Time', fontsize=14)
        else:
            # Two variables
            id1 = self.dlg.comboBox_POIVariable.currentIndex() - 1
            id2 = self.dlg.comboBox_POIVariable_2.currentIndex() - 1
            if self.dlg.comboBox_POIVariable_2.currentText() == 'Not Specified' or self.dlg.comboBox_POIVariable.currentText() == 'Not Specified':
                QMessageBox.critical(self.dlg, "Error", "No plotting variable is selected")
                return
            # Second variable may come from a different POI file.
            self.varpoi2 = self.dlg.comboBox_POI_2.currentText()
            data2 = np.loadtxt(self.folderPath[0] + '/POI_' + self.varpoi2 + '.txt', skiprows=1)

            if self.dlg.checkboxScatterbox.isChecked():
                # Variable-vs-variable scatter plot.
                plt.figure(1, figsize=(11, 11), facecolor='white')
                plt.title(self.dlg.comboBox_POIVariable.currentText() + '(' + self.varpoi1 + ') vs ' + self.dlg.comboBox_POIVariable_2.currentText() + '(' + self.varpoi2 + ')')
                ax1 = plt.subplot(1, 1, 1)
                ax1.plot(data1[:, varpos[id1]], data2[:, varpos[id2]], "k.")
                ax1.set_ylabel(varunit[id2], fontsize=14)
                ax1.set_xlabel(varunit[id1], fontsize=14)
            else:
                # Both variables against time.
                plt.figure(1, figsize=(15, 7), facecolor='white')
                plt.title(self.dlg.comboBox_POIVariable.currentText() + '(' + self.varpoi1 + ') and ' + self.dlg.comboBox_POIVariable_2.currentText() + '(' + self.varpoi2 + ')')
                ax1 = plt.subplot(1, 1, 1)
                if not varunit[id1] == varunit[id2]:
                    # Different units: twin y-axes, one per variable.
                    ax2 = ax1.twinx()
                    ax1.plot(dates, data1[:, varpos[id1]], 'r', label='$' + self.dlg.comboBox_POIVariable.currentText() + ' (' + self.varpoi1 + ')$')
                    ax1.legend(loc=1)
                    ax2.plot(dates, data2[:, varpos[id2]], 'b', label='$' + self.dlg.comboBox_POIVariable_2.currentText() + ' (' + self.varpoi2 + ')$')
                    ax2.legend(loc=2)
                    ax1.set_ylabel(varunit[id1], color='r', fontsize=14)
                    ax2.set_ylabel(varunit[id2], color='b', fontsize=14)
                else:
                    # Same units: share one axis.
                    ax1.plot(dates, data1[:, varpos[id1]], 'r', label='$' + self.dlg.comboBox_POIVariable.currentText() + ' (' + self.varpoi1 + ')$')
                    ax1.plot(dates, data2[:, varpos[id2]], 'b', label='$' + self.dlg.comboBox_POIVariable_2.currentText() + ' (' + self.varpoi2 + ')$')
                    ax1.legend(loc=2)
                    ax1.set_ylabel(varunit[id1], fontsize=14)

                plt.setp(plt.gca().xaxis.get_majorticklabels(), 'rotation', 45)
                ax1.grid(True)

                if (np.max(data1[:, 1]) - np.min(data1[:, 1])) > 1:
                    ax1.xaxis.set_major_locator(DayLocator())
                    ax1.xaxis.set_major_formatter(DateFormatter("%Y-%m-%d %H:%M"))
                else:
                    ax1.xaxis.set_minor_locator(HourLocator())
                    ax1.xaxis.set_major_formatter(DateFormatter("%H:%M"))

                ax1.set_xlabel('Time', fontsize=14)
        plt.show()
Ejemplo n.º 7
0
# #h, PRN1pose = broadcast2posM(a,np.array([weeks,timevec]).transpose(),5)
# # print(PRN1pose)
# # plt.plot(timevec, PRN1pose[:,0],'g-')
# # plt.plot(timevec, PRN1pose[:,1], 'b-')
# # plt.plot(timevec, PRN1pose[:,2], 'r-')

# [diction, dfyuma] =read_GPSyuma('YUMA245.ALM')

# h, posAlm = broadcast2pos(dfyuma,np.array([weeks,timevec]).transpose(),5 )
# plt.plot(timevec,posAlm[:,0]-PRN1pose[:,0],'g--')
# plt.plot(timevec,posAlm[:,1]-PRN1pose[:,1], 'b--')
# plt.plot(timevec,posAlm[:,2]-PRN1pose[:,2], 'r--')

# plt.show()
# Date-tick label format for the plot axis, e.g. "01 Sep 12:30".
formatter = DateFormatter("%d %h %H:%M ")
fig = plt.figure()
axpr = fig.add_subplot(111)
# Parse the RINEX observation file; obs_data is keyed by satellite id
# (e.g. "G05" = GPS PRN 5).
head, obs_data = parse_rinex_obs_file("nist2450.20o")
print(obs_data["G05"])
# Reference epoch for the observation timestamps.
base = np.datetime64("2020-09-01T00:00:00")
# NOTE(review): assumes obs_data["G05"].time is in microseconds since
# `base`; the +172800 s (= 2 days) offset presumably shifts into the GPS
# week's seconds-of-week — confirm units and week alignment.
seconds = (obs_data["G05"].time - base) / 1e6 + 172800
# Constant GPS week number 2121 for every epoch.
weekComp = np.ones(len(seconds)) * 2121
#print(seconds.astype(int))

# Satellite (PRN 5) positions from the broadcast ephemeris `a`
# (defined earlier in the file, outside this chunk).
h, PRN1pose = broadcast2posM(
    a,
    np.array([weekComp, seconds.astype(float)]).transpose(), 5)
# Azimuth / elevation / range as seen from the receiver at vec1_ECEF.
aer2 = pd.DataFrame(azelrange(vec1_ECEF, PRN1pose),
                    columns=['Azimuth', 'Elevation', 'Range'])
Ex1 = np.zeros(len(seconds))
Ejemplo n.º 8
0
def qso_rates_chart(size, qsos_per_hour):
    """
    Make the QSOs-per-hour-per-band stacked area chart.

    :param size: (width, height) of the target surface in pixels.
    :param qsos_per_hour: sequence of hourly rows; row[0] is the hour's
        datetime, rows[1:] are per-band QSO counts (one slot per band).
    :return: tuple of (raw RGB byte string, (width, height)) suitable for
        building a pygame surface, or (None, (0, 0)) when there is no data.
    """
    title = 'QSOs per Hour by Band'
    qso_counts = [[] for _ in range(10)]

    # Nothing to plot: bail out before touching matplotlib.
    # (This guard also makes the old always-True `data_valid` flag
    # redundant, so it has been removed.)
    if not qsos_per_hour:
        return None, (0, 0)

    # Transpose the hourly rows into per-band columns
    # (column 0 collects the timestamps, columns 1..N the band counts).
    for qpm in qsos_per_hour:
        for i in range(Bands.count()):
            qso_counts[i].append(qpm[i])

    logging.debug('make_plot(...,...,%s)', title)
    width_inches = size[0] / 100.0
    height_inches = size[1] / 100.0
    fig = plt.Figure(figsize=(width_inches, height_inches), dpi=100,
                     tight_layout={'pad': 0.10}, facecolor='black')

    # matplotlib 1.x used axis_bgcolor; 2.x+ renamed it to facecolor.
    if matplotlib.__version__[0] == '1':
        ax = fig.add_subplot(111, axis_bgcolor='black')
    else:
        ax = fig.add_subplot(111, facecolor='black')

    ax.set_title(title, color='white', size=48, weight='bold')

    st = calendar.timegm(EVENT_START_TIME.timetuple())
    lt = calendar.timegm(qsos_per_hour[-1][0].timetuple())

    dates = matplotlib.dates.date2num(qso_counts[0])
    colors = ['r', 'g', 'b', 'c', 'm', 'y', '#ff9900', '#00ff00', '#663300']
    labels = Bands.BANDS_TITLE[1:]
    if lt < st:
        # Last logged QSO predates the event start: scale the x-axis to
        # the data's own span instead of the event window.
        start_date = dates[0]
        end_date = dates[-1]
    else:
        start_date = matplotlib.dates.date2num(EVENT_START_TIME)
        end_date = matplotlib.dates.date2num(EVENT_END_TIME)
    ax.set_xlim(start_date, end_date)

    # Stack all nine band series (columns 1..9) on one plot.
    ax.stackplot(dates, *qso_counts[1:], labels=labels, colors=colors,
                 linewidth=0.2)
    ax.grid(True)
    legend = ax.legend(loc='best', ncol=Bands.count() - 1)
    legend.get_frame().set_color((0, 0, 0, 0))
    legend.get_frame().set_edgecolor('w')
    for text in legend.get_texts():
        plt.setp(text, color='w')
    for spine in ('left', 'right', 'top', 'bottom'):
        ax.spines[spine].set_color('w')
    ax.tick_params(axis='y', colors='w')
    ax.tick_params(axis='x', colors='w')
    ax.set_ylabel('QSO Rate/Hour', color='w', size='x-large', weight='bold')
    ax.set_xlabel('UTC Hour', color='w', size='x-large', weight='bold')
    ax.xaxis.set_major_locator(HourLocator())
    ax.xaxis.set_major_formatter(DateFormatter('%H'))

    # Render off-screen and hand back the raw RGB pixel buffer.
    canvas = agg.FigureCanvasAgg(fig)
    canvas.draw()
    renderer = canvas.get_renderer()
    # NOTE(review): tostring_rgb() is deprecated (removed in matplotlib
    # >= 3.10); migrating would require buffer_rgba() and an RGBA-aware
    # caller, so it is kept as-is here.
    raw_data = renderer.tostring_rgb()

    plt.close(fig)
    canvas_size = canvas.get_width_height()
    return raw_data, canvas_size
def _bokeh_cumulative_panel(data, country, start_date, legend_alpha):
    """Build a log-scale Bokeh panel of cumulative cases/deaths for one country.

    `data` is a dataframe with 'dateRep', 'cum_cases' and 'cum_deaths' columns;
    `country` is the human-readable prefix used in the title.
    """
    p = figure(title="{} Cumulative Cases/Deaths".format(country),
               plot_height=350,
               plot_width=500,
               y_axis_type="log",
               x_axis_type='datetime',
               x_range=(start_date, datetime.date.today()),
               sizing_mode='scale_width',
               tools=["pan,reset,wheel_zoom"])
    p.xaxis.axis_label = 'Date'
    p.yaxis.axis_label = 'Cumulative Cases/Deaths'
    # +1 offset — presumably to keep zero counts plottable on the log axis.
    p.line(data["dateRep"],
           data["cum_cases"] + 1,
           line_color="tomato",
           legend_label="Cases ({:,})".format(data["cum_cases"].iloc[-1]))
    p.line(data["dateRep"],
           data["cum_deaths"] + 1,
           line_color="blue",
           legend_label="Deaths ({:,})".format(data["cum_deaths"].iloc[-1]))
    p.legend.location = "top_left"
    p.legend.background_fill_alpha = legend_alpha
    return p


def _bokeh_daily_panel(data, country, start_date, legend_alpha):
    """Build a linear-scale Bokeh panel of 7-day-smoothed daily counts.

    Uses 'smooth_cases' / 'smooth_deaths' columns; the title shows today's
    raw 'cases' figure.
    """
    p = figure(title="{} Daily (7-day moving average) Today: {:,}".format(
        country, data["cases"].iloc[-1]),
               plot_height=350,
               plot_width=500,
               x_axis_type='datetime',
               x_range=(start_date, datetime.date.today()),
               sizing_mode='scale_width',
               tools=["pan,reset,wheel_zoom"])
    p.xaxis.axis_label = 'Date'
    p.yaxis.axis_label = 'Daily Cases/Deaths'
    p.line(data["dateRep"],
           data["smooth_cases"] + 1,
           line_color="tomato",
           legend_label="Cases ({:,})".format(int(data["smooth_cases"].iloc[-1])))
    p.line(data["dateRep"],
           data["smooth_deaths"] + 1,
           line_color="blue",
           legend_label="Deaths ({:,})".format(int(data["smooth_deaths"].iloc[-1])))
    p.legend.location = "top_left"
    p.legend.background_fill_alpha = legend_alpha
    return p


def plot_figure(month, day, plot_number):
    """Plot COVID-19 cases/deaths starting from the given 2020 date.

    Parameters
    ----------
    month, day : int
        Start date (within 2020) used as the left edge of every x-axis.
    plot_number : int
        1 -> Bokeh 2x2 grid of UK/Swiss cumulative and daily panels (HTML),
        2 -> Bokeh cumulative-death comparison across five countries (HTML),
        3 -> matplotlib 2x5 per-country grid saved to "Latest Plot.pdf".

    Relies on module-level dataframes (UK_data, Swiss_data, US_data,
    Sweden_data, Brazil_data) and summary globals (latest_death_data,
    latest_cases_data, and the *_change_cases / *_change_deaths values).
    """
    start_date = datetime.date(2020, month, day)

    if plot_number == 1:
        # NOTE(review): hard-coded absolute output path — kept for
        # compatibility, but consider making it configurable.
        output_file(
            '/Users/nathanmasters/Documents/GitHub/nathan-masters.github.io/assets/img/Bokeh/UK-CH_corona.html'
        )

        legend_alpha = 0.2

        p1 = _bokeh_cumulative_panel(UK_data, "UK", start_date, legend_alpha)
        p2 = _bokeh_daily_panel(UK_data, "UK", start_date, legend_alpha)
        p3 = _bokeh_cumulative_panel(Swiss_data, "Swiss", start_date,
                                     legend_alpha)
        p4 = _bokeh_daily_panel(Swiss_data, "Swiss", start_date, legend_alpha)

        # Column order: UK | Swiss, cumulative row above daily row.
        grid = gridplot([p1, p3, p2, p4], ncols=2, sizing_mode="scale_width")
        show(grid)

    elif plot_number == 2:
        # NOTE(review): hard-coded absolute output path (see above).
        output_file(
            '/Users/nathanmasters/Documents/GitHub/nathan-masters.github.io/assets/img/Bokeh/corona-comparisons.html'
        )
        legend_alpha = 0.2

        TOOLTIPS = [("Deaths", "$y{,}")]

        p_comp = figure(title="Cumulative Death Comparisons (as of {})".format(
            datetime.date.today()),
                        plot_height=350,
                        plot_width=500,
                        y_axis_type="log",
                        x_axis_type='datetime',
                        x_range=(start_date, datetime.date.today()),
                        sizing_mode='scale_width',
                        tools=["hover,pan,reset,wheel_zoom"],
                        tooltips=TOOLTIPS)

        p_comp.xaxis.axis_label = 'Date'
        p_comp.yaxis.axis_label = 'Cumulative Deaths'
        # One line per country; legend shows the latest cumulative deaths.
        for data, short_name, colour in (
                (UK_data, "UK", "blue"),
                (Swiss_data, "CH", "red"),
                (US_data, "US", "green"),
                (Sweden_data, "SW", "orange"),
                (Brazil_data, "BR", "purple")):
            p_comp.line(data["dateRep"],
                        data["cum_deaths"] + 1,
                        line_color=colour,
                        legend_label="{} ({:,})".format(
                            short_name, data["cum_deaths"].iloc[-1]))
        p_comp.legend.location = "top_left"
        p_comp.legend.background_fill_alpha = legend_alpha

        show(p_comp)

    elif plot_number == 3:
        # (dataframe, title/label prefix, 7-day case change, 7-day death change)
        country_specs = [
            (UK_data, "UK", UK_change_cases, UK_change_deaths),
            (Swiss_data, "Swiss", Swiss_change_cases, Swiss_change_deaths),
            (US_data, "US", US_change_cases, US_change_deaths),
            (Sweden_data, "Sweden", Sweden_change_cases, Sweden_change_deaths),
            (Brazil_data, "Brazil", Brazil_change_cases, Brazil_change_deaths),
        ]

        today = datetime.date.today()
        date_form = DateFormatter("%m-%d")

        fig = plt.figure(figsize=(25, 10))

        cumulative_axes = []
        for col, (data, name, chg_cases, chg_deaths) in enumerate(country_specs):
            # Top row: cumulative (log); bottom row: smoothed daily (linear).
            ax_cum = fig.add_subplot(2, 5, col + 1)
            ax_daily = fig.add_subplot(2, 5, col + 6)
            cumulative_axes.append(ax_cum)

            ax_cum.plot(data["dateRep"],
                        data["cum_cases"] + 1,
                        label=name + " Cases")
            ax_cum.plot(data["dateRep"],
                        data["cum_deaths"] + 1,
                        label=name + " Deaths")

            ax_daily.plot(data["dateRep"],
                          data["smooth_cases"] + 1,
                          label=name + " Cases")
            ax_daily.plot(data["dateRep"],
                          data["smooth_deaths"] + 1,
                          label=name + " Deaths")

            # Original behavior kept: tick labels always come from UK_data,
            # even for the other countries' panels — TODO confirm intent.
            ax_cum.set_xticklabels(UK_data["dateRep"], rotation=45)
            ax_daily.set_xticklabels(UK_data["dateRep"], rotation=45)

            ax_cum.set(
                ylabel="Cumulative Cases/Deaths",
                ylim=[1, 5000000],
                yscale="log",
                xlabel="Date",
                xlim=[start_date, today],
                title=name + " Data",
            )
            ax_cum.legend(loc="upper left")

            ax_daily.set(
                ylabel="Cases/Deaths per day",
                xlabel="Date",
                xlim=[start_date, today],
                title=name + " Data",
            )
            ax_daily.legend(loc="upper left")

            for axis in (ax_cum, ax_daily):
                axis.xaxis.set_major_formatter(date_form)
                axis.xaxis.set_major_locator(mdates.WeekdayLocator(interval=1))

            # Week-over-week percentage change, shown in the daily panel.
            ax_daily.annotate("7 day change: \ncases: " +
                              "{:.1%}".format(chg_cases) + "\ndeaths: " +
                              "{:.1%}".format(chg_deaths),
                              xy=(0.5, 0.5),
                              xycoords='axes fraction',
                              xytext=(0.03, 0.83),
                              textcoords='axes fraction',
                              horizontalalignment='left',
                              verticalalignment='top')

        # Annotate the latest death/case totals and their ratio on each
        # cumulative panel.  zip truncates to the shortest sequence, matching
        # the original loop over latest_death_data/latest_cases_data.
        yesterday = today - datetime.timedelta(days=1)
        for deaths, cases, ax_cum in zip(latest_death_data, latest_cases_data,
                                         cumulative_axes):
            for value in (deaths, cases):
                ax_cum.annotate(
                    '%.0f' % value,
                    xy=(yesterday, value),
                    xycoords='data',
                    xytext=(-1, 0),
                    textcoords='offset points',
                    horizontalalignment='right',
                    verticalalignment='bottom')
            ax_cum.annotate("Death/Case Ratio: %.2f" % (deaths / cases),
                            xy=(0.5, 0.5),
                            xycoords='axes fraction',
                            xytext=(0.03, 0.83),
                            textcoords='axes fraction',
                            horizontalalignment='left',
                            verticalalignment='top')

        fig.tight_layout(pad=3.0)

        plt.savefig("Latest Plot.pdf", dpi=300)
Ejemplo n.º 10
0
    def plot(self,
             graph='bpt_total',
             time_int=30,
             label=None,
             l_opt=False,
             ttl=None,
             trend=False,
             pnts=False):
        """Render one aggregated metric as a time series on the current figure.

        Args:
            graph: metric key, e.g. 'bpt_total' (throughput), 'rpt_total'
                (hits), 'err_total'/'errc_total' (errors), or latency metrics
                containing 'lat'/'art'.
            time_int: aggregation interval in seconds passed to log_agg.
            label: transaction label filter / legend label.
            l_opt: if True, reserve the lower half of the figure for a legend.
            ttl: graph title.
            trend: if True, also plot a trend line from self.trend().
            pnts: if True, draw point markers on the line.
        """
        # With a legend the plot occupies the upper half to leave room below.
        if l_opt:
            ax = pylab.subplot(2, 1, 1)
        else:
            ax = pylab.subplot(1, 1, 1)

        pylab.title(ttl)

        # Aggregate raw samples into one data point per `time_int` seconds.
        points = self.log_agg(time_int, label, graph)

        # Rescale throughput to KiB and latency to seconds when configured.
        if self.throughput_range and graph.count('bpt'):
            for key in points:
                points[key] = points[key] / 1024.0
        if self.time_range and (graph.count('lat') or graph.count('art')):
            for key in points:
                points[key] = points[key] / 1000.0

        # Default legend labels for the aggregate graphs.
        if graph == 'bpt_total':
            label = 'Total Throughput'
        elif graph == 'rpt_total':
            label = 'Total Hits'
        elif graph == 'err_total':
            label = 'Total Error Rate'
        elif graph == 'errc_total':
            label = 'Total Error Count'

        x = []
        y = []
        for key in sorted(points.keys()):
            # Keys are elapsed seconds; fold them into a synthetic datetime
            # anchored at 1970-01-01 so matplotlib's date machinery applies.
            # BUG FIX: the original used true division (`/`), which on
            # Python 3 yields floats and makes datetime() raise TypeError;
            # divmod on ints gives the intended floor-division results.
            days, rem = divmod(int(key), 86400)
            hours, rem = divmod(rem, 3600)
            minutes, seconds = divmod(rem, 60)
            x.append(datetime(1970, 1, days + 1, hours, minutes, seconds))
            y.append(points[key])

        # Optional point markers on the series.
        if pnts:
            pylab.plot(x,
                       y,
                       linestyle='solid',
                       marker='.',
                       markersize=5,
                       label=label,
                       linewidth=0.5)
        else:
            pylab.plot(x, y, label=label, linewidth=0.5)

        # Optional trend line derived from the same series.
        if trend:
            pylab.plot(x, self.trend(y), label=label + ' (Trend)', linewidth=1)

        pylab.grid(True)

        # Choose a major-tick interval (minutes), quantized to multiples of 10
        # so roughly ten major ticks span the test duration.
        # BUG FIX: `// 10 * 10` restores the Python 2 floor-division
        # quantization; with `/` on Python 3 the quantization was lost.
        max_min = self.end // 60
        min_min = self.start // 60

        time_int = int((max_min - min_min) / 10.0) // 10 * 10

        if not time_int:
            time_int = 10 if max_min > 75 else 5

        if time_int > 30:
            time_int = 60

        if time_int <= 60:
            pylab.xlabel('Elapsed time (hh:mm)')
            ax.xaxis.set_major_locator(
                MinuteLocator(arange(0, max_min, time_int)))
            ax.xaxis.set_minor_locator(
                MinuteLocator(arange(0, max_min, time_int / 5)))
            ax.xaxis.set_major_formatter(DateFormatter('%H:%M'))
        else:
            pylab.xlabel('Elapsed time (dd;hh:mm)')
            # BUG FIX: the original called pylab.ax.get_xticklabels(), which
            # raises AttributeError (the pylab module has no `ax`); use the
            # local axes object instead.
            labels = ax.get_xticklabels()
            ax.xaxis.set_major_formatter(DateFormatter('%d;%H:%M'))
            pylab.setp(labels, rotation=0, fontsize=8)

        # Place the legend below the plot when requested.
        if l_opt:
            pylab.legend(bbox_to_anchor=(0, -0.2), loc=2, ncol=1)
Ejemplo n.º 11
0
else:
    print("File read succesfful")
    ak = S.keys()[3::3]
    sk = S.keys()[4::3]

    start = S[S['time'] == '04:00'].index.values[0]

    S = S[start:]
    t = S.keys()[2]
    t = pd.to_datetime(S[t], format='%H:%M')
    start = S[S['time'] == '09:30'].index.values[0]
    end = S[S['time'] == '16:00'].index.values[0] + 1

    fig, axs = plt.subplots(5, 2, figsize=(15, 15))
    fig.canvas.set_window_title('INCLUDING PRE-MARKET')
    min_form = DateFormatter("%H:%M")

    titles = ["Volume 1 min average","Volume 5 min average","Volume 30 min average", "Volume accumulative average"\
            ,"Range 1 min average","Range 5 min average","Range 30 min average"\
            ,"ROC 1 min average","ROC 5 min average","ROC 30 min average"]
    k = 0
    for i in range(5):
        for j in range(2):
            if i >= 1 and j == 3:
                axs[i, j].axis('off')
            else:
                mean = np.array(S[ak[k]], dtype=float)
                std = np.array(S[sk[k]], dtype=float)
                #print(mean)
                axs[i, j].xaxis.set_major_formatter(min_form)
                axs[i, j].xaxis.set_major_locator(
Ejemplo n.º 12
0
# Two-panel figure of simulated soybean-yield effects, 1901-2005.
fig = plt.figure(figsize=(8,10))
ax = fig.add_subplot(212)
# One synthetic datetime per simulation year so plot_date can be used.
xx=range(1901,2006)
xdates = [datetime.datetime.strptime(str(int(date)),'%Y') for date in xx]
# Lower panel: per-driver effect on yield (percent) — series presumably
# CO2, climate, NF and irrigation deltas; verify against where they are built.
ax.plot_date(xdates,finalcnew,"k-",label=r"$\Delta$ CO$_{2}$",linewidth='3')
ax.plot_date(xdates,finalclinew,"r-",label=r"$\Delta$ Climate",linewidth='3')
ax.plot_date(xdates,finalnnew,"g-",label=r"$\Delta$ NF",linewidth='3')
ax.plot_date(xdates,finalinew,"b-",label=r"$\Delta$ Irrigation",linewidth='3')
# Trim the first and last year from the visible range.
plt.xlim(xdates[1],xdates[104])

leg = plt.legend(loc=4,fancybox=True, fontsize=18)
leg.get_frame().set_alpha(0.5)

#ax.set_ylim([0,14])
# Show only the year on the x-axis.
ax.xaxis.set_major_formatter(DateFormatter('%Y'))
plt.tick_params(axis='both',labelsize=18)

plt.xlabel("Year",fontsize=18)

plt.ylabel('Effect on soybean yield (%)',fontsize=18)


# Upper panel: yield under each sensitivity scenario (S_all, S_CO2, ...).
ax = fig.add_subplot(211)

# Render mathtext subscripts in the regular (non-italic) font.
params = {'mathtext.default': 'regular' }          
plt.rcParams.update(params)
ax.plot_date(xdates,allynew,"y-",label=" $S_{all}$",linewidth='3')
ax.plot_date(xdates,allycnew,"k-",label=" $S_{CO2}$",linewidth='3')
ax.plot_date(xdates,allyclinew,"r-",label="$S_{Climate}$",linewidth='3')
ax.plot_date(xdates,allynnew,"g-",label=" $S_{NF}$",linewidth='3')
# Collect per-detection attributes into one dataframe, grouped by event id.
# (t_temp, evid, t_det, chan, det_val, avg_cc are defined earlier in the
# original script — templates, event ids, detection times, channel counts,
# detection values and average cross-correlations, presumably; confirm there.)
dic = {
    'temp_time': t_temp,
    'evid': evid,
    'det_time': t_det,
    'chan': chan,
    'det_val': det_val,
    'avg_cc': avg_cc
}
df = pd.DataFrame(dic)

g = df.groupby('evid')
# Event ids as strings (e.g. for labeling).
keys = g.groups.keys()
keys = [str(k) for k in keys]

#figure
# NOTE(review): date_format is prepared here but not applied in this excerpt.
date_format = DateFormatter("%Y-%m-%d")
cmap = plt.get_cmap('jet', 30)
fig, ax1 = plt.subplots(figsize=(18, 8))
# Scatter of detections over time: marker size scales with channel count,
# colour encodes the group index of each event id.
im = ax1.scatter(t_det_mpl,
                 df.avg_cc,
                 s=10 * df.chan.astype(float) * 3,
                 c=g.ngroup(),
                 cmap=cmap,
                 alpha=0.55,
                 label='Detected Events')
# Vertical red line marking the 2014-03-10 event.
ax1.plot(np.full((2),
                 UTCDateTime(2014, 3, 10).matplotlib_date), [0.6, 1],
         'r')  #plot M6.8 event

cbar = fig.colorbar(im, ax=ax1, cmap=cmap)
Ejemplo n.º 14
0
# Per-site coral reflectance samples exported as CSV.
infile = "/scratch/dknapp4/Western_Hawaii/Moorea/moorea_sample_coral_output_sr_20190716.csv"
tdata = pd.read_csv(infile)
header = list(tdata)
header.pop(0)  # drop the leading 'Date' column from the data headers
thedates = np.array(tdata['Date'], dtype='S8')  # dates stored as YYYYMMDD bytes
data = np.asarray(tdata[header])
# Cache as .npy so reruns can skip the CSV parse (see commented loads below).
np.save("quick1.npy", data)
np.save("quick2.npy", thedates)
## data = np.load("quick1.npy")
## thedates = np.load("quick2.npy")
# Convert YYYYMMDD byte strings to datetime.date objects.
thedates = np.asarray(
    [date(int(day[0:4]), int(day[4:6]), int(day[6:8])) for day in thedates])

# Monthly x-axis ticks formatted as mm/dd/yy.
rule = rrulewrapper(MONTHLY, interval=1)
loc = RRuleLocator(rule)
formatter = DateFormatter('%m/%d/%y')

with PdfPages('coral_change_moorea_sr_mean_rev20190716.pdf') as pdf:
    ## Page 1, Red
    fig = plt.figure(figsize=(8, 10))

    ax = plt.subplot(2, 1, 1)
    ax.set_title('')
    # -9. marks missing samples; keep only valid rows.
    good = np.not_equal(data[:, 0], -9.)
    # Mean / std-dev across the first 8 sample columns for each date.
    tmean = np.mean(data[good, 0:8], axis=1)
    tsdev = np.std(data[good, 0:8], axis=1)
    plt.errorbar(thedates[good], tmean, yerr=tsdev, fmt='-bo', capsize=3)
    ax.xaxis.set_major_locator(loc)
    ax.xaxis.set_major_formatter(formatter)
    ax.xaxis.set_tick_params(rotation=30, labelsize=10)
    ## plt.plot_date(thedates[good], tmean, 'b')
Ejemplo n.º 15
0
    ax.axvline(x=medAmp, color='red', linestyle='dashed')
    plt.text(
        3 / 2 * medAmp, np.log10(max(hist)),
        "Median $A_{max}$: " + str(np.round(medAmp, decimals=10)) + " m/s")
    plt.savefig(templatePath + "clustering/" + str(numCluster) + "/" +
                "cluster_" + str(c) + "_amplitude_distribution.png")
    plt.close()

    # make simple histogram of times
    startTime = waves[0].stats.starttime.datetime
    endTime = waves[-1].stats.starttime.datetime
    numDays = (endTime - startTime).days + 1
    plotDates = date2num(detTimes)
    plt.hist(plotDates, numDays)
    ax = plt.gca()
    ax.xaxis.set_major_formatter(DateFormatter('%Y-%m-%d'))
    plt.title("Timeseries of events in cluster " + str(c))
    plt.xlabel("Date")
    plt.ylabel("Detection count")
    plt.gcf().autofmt_xdate()
    plt.savefig(templatePath + "clustering/" + str(numCluster) + "/" +
                "cluster_" + str(c) + "_time_distribution.png")
    plt.close()

    # make spectra figure
    freq = np.fft.fftfreq(snipLen * fs + 1, 1 / fs)
    for n in range(len(clusterEvents)):
        try:
            plt.plot(freq[0:int(snipLen * fs / 2)],
                     cluster_spectra[n][0:int(snipLen * fs / 2)],
                     'k',
Ejemplo n.º 16
0
# Analyze creation dates of look-alike account names (data gathered with
# beem from the Steem blockchain and cached in a shelve file).
import matplotlib as mpl
mpl.use('Agg')  # headless backend; must be selected before importing pyplot
import matplotlib.pyplot as plt
from matplotlib.dates import date2num, DateFormatter
import shelve
import sys
from beem.utils import addTzInfo
from datetime import datetime

dateFmt = DateFormatter('%Y-%m-%d')
# Window of interest for account creation times (timezone-aware).
start = addTzInfo(datetime(2018, 6, 10))
stop = addTzInfo(datetime(2018, 6, 12))

# Previously scraped account metadata, keyed by account name.
s = shelve.open("accounts.active.shelve")
accounts = s['accounts']
s.close()

alike_names = []       # names whose creation falls inside [start, stop]
creation_dates = []    # creation dates (matplotlib datenums) for all names

with open("alike_names.txt") as f:
    for line in f.readlines():
        name = line[:-1]  # strip trailing newline
        creation_dates.append(date2num(accounts[name]['created']))
        sys.stdout.write("%s\r" % (name))  # lightweight progress indicator
        if start <= accounts[name]['created'] <= stop:
            alike_names.append(name)

print("\n", len(alike_names), len(creation_dates))
print(alike_names)
Ejemplo n.º 17
0
def main(start_year=1980, end_year=1989):
    """Compare daily climatologies of total soil moisture (liquid I1 + frozen
    I2) between two CRCM5 simulations, averaged over the basin upstream of
    selected gauging stations.

    Saves one (day-of-year x soil depth) difference plot per station, plus a
    single domain-wide mean plot, under ``images_for_lake-river_paper``.

    Parameters
    ----------
    start_year, end_year : int
        Inclusive year range of the climatology.
    """

    soil_layer_widths = infovar.soil_layer_widths_26_to_60
    # Depths of the layer tops: cumulative widths shifted by one layer, with
    # 0 prepended for the surface.
    soil_tops = np.cumsum(soil_layer_widths).tolist()[:-1]
    soil_tops = [
        0,
    ] + soil_tops

    selected_station_ids = [
        "061905", "074903", "090613", "092715", "093801", "093806"
    ]

    #    path1 = "/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl_spinup.hdf"
    #    label1 = "CRCM5-HCD-RL"

    path1 = "/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl-intfl_spinup_ITFS.hdf5"
    label1 = "CRCM5-HCD-RL-INTFL"

    path2 = "/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl-intfl_ITFS_avoid_truncation1979-1989.hdf5"
    label2 = "CRCM5-HCD-RL-INTFL-improved"

    ############
    images_folder = "images_for_lake-river_paper/comp_soil_profiles"
    if not os.path.isdir(images_folder):
        os.mkdir(images_folder)

    # Static fields describing the model grid.
    fldirs = analysis.get_array_from_file(
        path=path1, var_name=infovar.HDF_FLOW_DIRECTIONS_NAME)
    lons2d, lats2d, basemap = analysis.get_basemap_from_hdf(path1)

    lake_fractions = analysis.get_array_from_file(
        path=path1, var_name=infovar.HDF_LAKE_FRACTION_NAME)
    cell_areas = analysis.get_array_from_file(
        path=path1, var_name=infovar.HDF_CELL_AREA_NAME_M2)
    acc_areakm2 = analysis.get_array_from_file(
        path=path1, var_name=infovar.HDF_ACCUMULATION_AREA_NAME)
    depth_to_bedrock = analysis.get_array_from_file(
        path=path1, var_name=infovar.HDF_DEPTH_TO_BEDROCK_NAME)

    cell_manager = CellManager(fldirs,
                               lons2d=lons2d,
                               lats2d=lats2d,
                               accumulation_area_km2=acc_areakm2)

    #get climatologic liquid soil moisture and convert fractions to mm
    # NOTE: time.clock() was removed in Python 3.8; perf_counter() is the
    # direct replacement for wall-clock interval timing.
    t0 = time.perf_counter()
    daily_dates, levels, i1_nointfl = analysis.get_daily_climatology_of_3d_field(
        path_to_hdf_file=path1,
        var_name="I1",
        start_year=start_year,
        end_year=end_year)
    print("read I1 - 1")
    print("Spent {0} seconds ".format(time.perf_counter() - t0))

    _, _, i1_intfl = analysis.get_daily_climatology_of_3d_field(
        path_to_hdf_file=path2,
        var_name="I1",
        start_year=start_year,
        end_year=end_year)
    print("read I1 - 2")

    #get climatologic frozen soil moisture and convert fractions to mm
    _, _, i2_nointfl = analysis.get_daily_climatology_of_3d_field(
        path_to_hdf_file=path1,
        var_name="I2",
        start_year=start_year,
        end_year=end_year)
    print("read I2 - 1")

    _, _, i2_intfl = analysis.get_daily_climatology_of_3d_field(
        path_to_hdf_file=path2,
        var_name="I2",
        start_year=start_year,
        end_year=end_year)
    print("read I2 - 2")
    #
    # Total (liquid + frozen) soil moisture for each simulation.
    sm_intfl = i1_intfl + i2_intfl
    sm_nointfl = i1_nointfl + i2_nointfl

    #Get the list of stations to do the comparison with
    stations = cehq_station.read_station_data(
        start_date=datetime(start_year, 1, 1),
        end_date=datetime(end_year, 12, 31),
        selected_ids=selected_station_ids)

    print("sm_noinfl, min, max = {0}, {1}".format(sm_nointfl.min(),
                                                  sm_nointfl.max()))
    print("sm_infl, min, max = {0}, {1}".format(sm_intfl.min(),
                                                sm_intfl.max()))
    # Difference field to plot: (improved run, path2) - (base run, path1).
    diff = (sm_intfl - sm_nointfl)
    #diff *= soil_layer_widths[np.newaxis, :, np.newaxis, np.newaxis] * 1000  # to convert in mm

    #print "number of nans", np.isnan(diff).astype(int).sum()

    print("cell area min,max = {0}, {1}".format(cell_areas.min(),
                                                cell_areas.max()))
    print("acc area min,max = {0}, {1}".format(acc_areakm2.min(),
                                               acc_areakm2.max()))

    assert np.all(lake_fractions >= 0)
    print("lake fractions (min, max): ", lake_fractions.min(),
          lake_fractions.max())

    #No need to go very deep
    nlayers = 3
    z, t = np.meshgrid(soil_tops[:nlayers], date2num(daily_dates))
    station_to_mp = cell_manager.get_model_points_for_stations(stations)

    plotted_global = False

    for the_station, mp in station_to_mp.items():
        assert isinstance(mp, ModelPoint)
        assert isinstance(the_station, Station)
        fig = plt.figure()
        umask = cell_manager.get_mask_of_upstream_cells_connected_with_by_indices(
            mp.ix, mp.jy)

        #exclude lake cells from the profiles
        sel = (umask == 1) & (depth_to_bedrock > 3) & (acc_areakm2 >= 0)

        # Weight each upstream cell by its land (non-lake) area.
        umaskf = umask.astype(float)
        umaskf *= (1.0 - lake_fractions) * cell_areas
        umaskf[~sel] = 0.0

        # Area-weighted mean profile: contract the (x, y) dims of diff.
        profiles = np.tensordot(diff, umaskf) / umaskf.sum()
        print(profiles.shape, profiles.min(), profiles.max(), umaskf.sum(),
              umaskf.min(), umaskf.max())

        # Symmetric color limits around zero.
        d = np.abs(profiles).max()
        print("d = {0}".format(d))
        clevs = np.round(np.linspace(-d, d, 12), decimals=5)

        diff_cmap = cm.get_cmap("RdBu_r", lut=len(clevs) - 1)
        bn = BoundaryNorm(clevs, len(clevs) - 1)

        # diff = run2 - run1, so the title is (label2)-(label1).
        # (Original passed label2 twice by mistake.)
        plt.title("({})-({})".format(label2, label1))
        img = plt.contourf(t,
                           z,
                           profiles[:, :nlayers],
                           cmap=diff_cmap,
                           levels=clevs,
                           norm=bn)
        plt.colorbar(img, ticks=clevs)
        ax = plt.gca()
        assert isinstance(ax, Axes)

        # Depth grows downward on y; month labels on x.
        ax.invert_yaxis()
        ax.xaxis.set_major_formatter(DateFormatter("%b"))
        ax.xaxis.set_major_locator(MonthLocator())

        fig.savefig(os.path.join(
            images_folder, "{0}_{1}_{2}.jpeg".format(the_station.id, label1,
                                                     label2)),
                    dpi=cpp.FIG_SAVE_DPI,
                    bbox_inches="tight")

        print("processed: {0}".format(the_station))
        if not plotted_global:
            # Drawn once per call: the domain-wide (not station-specific)
            # mean profile, using all land cells with some bedrock depth.
            plotted_global = True
            fig = plt.figure()
            sel = (depth_to_bedrock >= 0.1) & (acc_areakm2 >= 0)

            umaskf = (1.0 - lake_fractions) * cell_areas
            umaskf[~sel] = 0.0

            profiles = np.tensordot(diff, umaskf) / umaskf.sum()
            print(profiles.shape, profiles.min(), profiles.max(), umaskf.sum(),
                  umaskf.min(), umaskf.max())

            d = np.abs(profiles).max()
            print("d = {0}".format(d))
            clevs = np.round(np.linspace(-d, d, 12), decimals=5)

            diff_cmap = cm.get_cmap("RdBu_r", lut=len(clevs) - 1)
            bn = BoundaryNorm(clevs, len(clevs) - 1)

            img = plt.contourf(t,
                               z,
                               profiles[:, :nlayers],
                               cmap=diff_cmap,
                               levels=clevs,
                               norm=bn)
            plt.colorbar(img, ticks=clevs)
            ax = plt.gca()
            assert isinstance(ax, Axes)

            ax.invert_yaxis()
            ax.xaxis.set_major_formatter(DateFormatter("%b"))
            ax.xaxis.set_major_locator(MonthLocator())

            fig.savefig(os.path.join(images_folder, "global_mean.jpeg"),
                        dpi=cpp.FIG_SAVE_DPI,
                        bbox_inches="tight")

    pass
Ejemplo n.º 18
0
def show_per_site_model_performance_one_to_one(sites_and_dates, y_pred,
                                               ytrain):
    """Plot predicted vs. observed AQT values for every flagging site and
    return the per-site R2 scores.

    Builds a 5x6 grid of subplots (one per site), each showing predictions
    against true values together with the 1:1 reference line.

    Parameters
    ----------
    sites_and_dates : list of tuple
        One tuple per sample starting with (site_key, date, ...). Each tuple
        is extended in place with (prediction, truth).
    y_pred, ytrain : sequence
        Predictions and true values aligned with ``sites_and_dates``.

    Returns
    -------
    list
        ``[site_key, r2]`` pairs, in sorted site order.
    """
    path_fig = r"\\ad.utwente.nl\home\garciamartii\Documents\PhD\Papers\Journals\02_IJGIS\images\print_v3\{0}"
    name_fig = "Figure_03_General_Performance_per_site.png"
    print(len(y_pred), len(ytrain), len(sites_and_dates))

    # Attach (prediction, truth) to each sample tuple, in place.
    for i in range(len(sites_and_dates)):
        a_pred = y_pred[i]
        a_train = ytrain[i]
        newtuple = sites_and_dates[i] + (a_pred, a_train)
        sites_and_dates[i] = newtuple

    # Group samples by site key (tuple position 0).
    records = list(sorted(sites_and_dates, key=lambda x: x[0]))
    dic_sites = defaultdict(list)
    for tup in records:
        key = tup[0]
        dic_sites[key].append(tup[1:])

    # Sort each site's samples chronologically (date is now position 0).
    for key in sorted(dic_sites.keys()):
        sorted_dates = list(sorted(dic_sites[key], key=lambda x: x[0]))
        dic_sites[key] = sorted_dates
    print("Total sites: ", len(dic_sites.keys()))

    i = 1
    dicscores = []
    plt.suptitle(
        "Performance of RF predicting the AQT for each individual flagging site",
        fontsize=36)
    plt.subplots_adjust(hspace=.6, wspace=.6)
    for key in sorted(dic_sites.keys()):
        the_site = dic_sites[key]
        the_pred = np.array([item[1] for item in the_site])
        the_obs = np.array([item[2] for item in the_site])

        sort_dates, sort_target, sort_fit = zip(
            *sorted(zip(the_site, the_obs, the_pred)))
        score = np.round(r2_score(sort_target, sort_fit), decimals=2)
        dicscores.append([key, score])

        ax = plt.subplot(5, 6, i)
        plt.title("{0} (R2: {1})".format(key, score),
                  size=16,
                  fontweight="bold",
                  y=1.08)
        plt.ylim(0, 100)

        # Shared axis labels: only on the middle-left (13) and bottom (27)
        # subplots so they act as labels for the whole grid.
        if i in [13]:
            ax.yaxis.labelpad = 20
            plt.ylabel("True values", size=30)
        if i in [27]:
            ax.xaxis.set_label_coords(1.1, -0.5)
            plt.xlabel("Predictions", size=30)

        plt.plot(the_pred,
                 the_obs,
                 'o',
                 label=u'Observations',
                 color="darkblue",
                 markeredgewidth=1.0,
                 markeredgecolor='black')
        # 1:1 reference line (perfect prediction).
        plt.plot(the_obs, the_obs, "r-", label="1:1", linewidth=2)

        ax.get_xaxis().set_minor_locator(mpl.ticker.AutoMinorLocator())
        ax.get_yaxis().set_minor_locator(mpl.ticker.AutoMinorLocator())
        # (The original issued this grid call twice; once is enough.)
        ax.grid(b=True, which='major', color='#A8A8A8', linewidth=0.5)

        i += 1

    # plt.savefig(path_fig.format(name_fig), dpi=300)
    plt.show()
    return dicscores
Ejemplo n.º 19
0
# Extract timestamps (chars 18:44 of the raw log line) for every log entry
# in which the cat was detected as present.
for i in range(0,len(cat_present)):
    j=cat_present[i]
    text_temp=text_data[j]
    #this writes a list of the raw timestamp strings
    cat_present_list.append(text_temp[18:44])
    #this converts to datetime objects
    cat_present_dt.append((datetime.datetime.strptime(text_temp[18:44], "%Y-%m-%d %H:%M:%S.%f")))
    #also keep track of which hours cats are present (chars 29:31 are the hour)
    cat_present_hour.append(int(text_temp[29:31]))

if plot_history == 1:
    # Time series of raw occupancy status with daily major / 6-hourly minor ticks.
    fig, ax = plt.subplots()
    ax.plot_date(time_data_dt, cat_status)
    ax.xaxis.set_major_locator(DayLocator())
    ax.xaxis.set_minor_locator(HourLocator(arange(0, 25, 6)))
    ax.xaxis.set_major_formatter(DateFormatter('%Y-%m-%d'))
    # Status-bar readout format when hovering in an interactive backend.
    ax.fmt_xdata = DateFormatter('%Y-%m-%d %H:%M:%S')
    fig.autofmt_xdate()
    plt.title("Historical cat occupancy in yurt")
    plt.ylabel("Cat status")
    plt.xlabel("Date")
    fig.savefig('historical_cat_yurt_status.png')


#try a histogram to see which days are most popular
end = datetime.date(2017,7,4)
start = datetime.date(2017,6,28)
one_day = datetime.timedelta(days = 1)

week = []
for i in range((end-start).days+1):  
Ejemplo n.º 20
0
        marker='x',
        markersize=4,
        alpha=0.7,
        color='r')
ax.plot(guatemala['new_cases_SEIRD'],
        label='Predicción modelo SEIRD - Infectados activos',
        alpha=0.7,
        color='r')

# Set title and axis labels (Spanish: time / active infections).
ax.set(xlabel='Tiempo (Fecha)',
       ylabel='Infectados activos',
       title='Nuevos casos reportado MSPAS y resultado modelo SEIRD')

# Define the date format (day-month-year)
date_form = DateFormatter("%d-%m-%Y")
ax.xaxis.set_major_formatter(date_form)

# Ensure a major tick for each week using (interval=1)
ax.xaxis.set_major_locator(mdates.WeekdayLocator(interval=1))

plt.xticks(rotation=45)
plt.legend(loc=0)  # loc=0: let matplotlib pick the best legend position
plt.grid()
plt.savefig(path_output + 'infected_SEIRD.jpg', dpi=500, bbox_inches='tight')
plt.show()

#%% Plotting new deaths

fig, ax = plt.subplots(figsize=(12, 8))
ax.plot(guatemala['new_deaths'].cumsum(),
Ejemplo n.º 21
0
def plot_by_dates(players):
    """Plot every player's score history over the configured time window.

    Players are drawn in descending order of current ranking; each curve gets
    a randomly chosen marker, linestyle and face color, and the player's peak
    score inside the window is annotated.

    Parameters
    ----------
    players : iterable
        Objects exposing ``Name``, ``Events`` (each event with ``Score`` and
        ``Date`` attributes) and a ``ranking()`` method.
    """
    markers = ['o', 'v', '^', 's', 'p', '*', 'h', 'H', 'D', 'd']
    # Tableau-20 palette used for marker faces.
    mfc = ['#1f77b4', '#aec7e8', '#ff7f0e', '#ffbb78', '#2ca02c',
           '#98df8a', '#d62728', '#ff9896', '#9467bd', '#c5b0d5',
           '#8c564b', '#c49c94', '#e377c2', '#f7b6d2', '#7f7f7f',
           '#c7c7c7', '#bcbd22', '#dbdb8d', '#17becf', '#9edae5']
    ls = ['dashed', 'dashdot', 'dotted']

    # Display window: ctrl["display_history"] years back from now.
    ending_date = datetime.datetime.now()
    starting_date = ending_date - datetime.timedelta(days=365*ctrl["display_history"])

    # every monday
    mondays = WeekdayLocator(MONDAY)
    # first day of every month
    months = MonthLocator(range(1, 13), bymonthday=1, interval=1)
    monthsFmt = DateFormatter("%b '%y")

    fig, ax = plt.subplots(figsize=(20, 10))

    # Remove the plot frame lines. They are unnecessary here.
    ax.spines['top'].set_visible(False)
    ax.spines['bottom'].set_visible(False)
    ax.spines['right'].set_visible(False)
    ax.spines['left'].set_visible(False)

    # Ensure that the axis ticks only show up on the bottom and left of the
    # plot. Ticks on the right and top are generally unnecessary.
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()

    # Sort players according to their latest ranking in descending order
    sorted_players = sorted(players, key=lambda player: player.ranking(), reverse=True)

    for player in sorted_players:
        # Keep only the events that fall inside the display window.
        scores_and_dates = [(event.Score, event.Date) for event in player.Events
                            if event.Date >= starting_date and event.Date <= ending_date]
        if not scores_and_dates:
            # No events in the window: skip instead of crashing on zip(*[]).
            continue
        (scores, dates) = map(list, zip(*scores_and_dates))

        _mfc = random.choice(mfc)
        ax.plot_date(dates, scores,
                     ls=random.choice(ls), marker=random.choice(markers),
                     markerfacecolor=_mfc,
                     label=player.Name+":"+str(player.ranking()))

        # Annotate the peak score within the window.
        y, x = max(scores_and_dates, key=itemgetter(0))
        label = player.Name.split()[0] + ":" + str(y)
        ax.annotate(label, xy=(x, y), xycoords='data',
                    color = _mfc,
                    bbox=dict(boxstyle="round4", fc="w", alpha=0.75),
                    xytext=(-100, 30), textcoords='offset points', size=12,
                    arrowprops=dict(arrowstyle="fancy",
                                    fc="0.3", ec="none",
                                    patchB=Ellipse((2, -1), 0.5, 0.5),
                                    connectionstyle="angle3,angleA=0,angleB=-90"))

        #ax.text(ending_date, player.ranking(), player.Name, fontsize=12, color='g')

    # format the ticks
    ax.xaxis.set_major_locator(months)
    ax.xaxis.set_major_formatter(monthsFmt)
    ax.xaxis.set_minor_locator(mondays)
    ax.autoscale_view()
    ax.set_xlim(starting_date, ending_date)
    ax.grid(True)
    plt.legend(loc='best', shadow=True)
    # Boolean labelleft/labelright: the string form 'on' is deprecated.
    plt.tick_params(axis='y', which='both', labelleft=True, labelright=True)
    plt.ylabel('Ranking')
    title = "Period: " + '{:%m/%d/%Y}'.format(starting_date) + ' ~ ' + '{:%m/%d/%Y}'.format(ending_date)
    plt.title(title)

    # rotates and right aligns the x labels, and moves the bottom of the
    # axes up to make room for them
    fig.autofmt_xdate()

    plt.show()

    return
def plot_main(pdata):
    """Draw a twin-axis time-series figure described by *pdata* and save it.

    Parameters
    ----------
    pdata : dict
        Required keys:
        - 'xt'          : x values (dates)
        - 'yy'          : three y series; yy[0] on the left axis, yy[1] and
                          yy[2] (plotted scaled x2) on the right axis
        - 'data_labels' : legend labels for the three series
        - 'suptit'      : figure title
        - 'fnout'       : output file path
    """
    xt, yy = pdata['xt'], pdata['yy']
    data_labels = pdata['data_labels']

    fig = plt.figure()
    fig.set_size_inches(6, 4.5)  ## (xsize,ysize)

    ### Page Title
    suptit = pdata['suptit']
    fig.suptitle(suptit,
                 fontsize=15,
                 y=0.97,
                 va='bottom',
                 stretch='semi-condensed')

    ### Parameters for subplot area
    fig.subplots_adjust(left=0.05, right=0.95, top=0.92, bottom=0.07)

    ### Plot time series
    ax1 = fig.add_subplot(111)
    ax1.plot(xt, yy[0], c='C0', lw=4, alpha=0.7,
             label=data_labels[0])  # Use left y-axis

    ax1b = ax1.twinx()
    ax1b.plot(xt, yy[1], c='C1', lw=1.5, alpha=0.9,
              label=data_labels[1])  # Use right y-axis
    ax1b.plot(xt,
              yy[2] * 2,
              c='C5',
              lw=1.5,
              alpha=0.9,
              label=data_labels[2] + 'x2')  # Use right y-axis

    #ax1.set_title('(a) Monthly',fontsize=13,ha='left',x=0)
    # Major tick on Jan 1 of every other year. Loop variable renamed to
    # `yr` so it does not shadow the data series `yy` bound above.
    ax1.set_xticks([date(yr, 1, 1) for yr in range(2011, 2022, 2)])
    #ax1.xaxis.set_major_locator(YearLocator(base=2))  # Every 2 years; default: month=1, day=1
    ax1.xaxis.set_major_formatter(DateFormatter("%Y\n%b"))
    # <--- For more information of Date Format, see Reference above
    ax1.xaxis.set_minor_locator(AutoMinorLocator(2))

    ax1.set_ylim(-25, 25)
    ax1.yaxis.set_minor_locator(AutoMinorLocator(2))
    ax1.set_ylabel('(m/s)', fontsize=11)

    ax1b.set_ylim(-2.5, 2.75)
    ax1b.yaxis.set_minor_locator(AutoMinorLocator(2))
    ax1b.set_ylabel('(degC)', fontsize=11, rotation=-90, va='bottom')

    ax1.grid()
    ax1.axhline(y=0, c='k', lw=0.8)  # zero reference line
    ### Legend for twinx() case: figure-level so both axes' handles appear
    fig.legend(bbox_to_anchor=(0.06, 0.075),
               loc='lower left',
               fontsize=11,
               ncol=3,
               borderaxespad=0.)

    ###---
    fnout = pdata['fnout']
    print(fnout)
    fig.savefig(fnout, bbox_inches='tight', dpi=150)
    plt.show()
    return
Ejemplo n.º 23
0
def _ohlc_candle_rows(grouped):
    """Collapse each group of OHLC rows into a single one-row candle frame."""
    rows = [
        pd.DataFrame(
            {"Open": group.iloc[0, 0],
             "High": group.High.max(),
             "Low": group.Low.min(),
             "Close": group.iloc[-1, 3]},
            index=[group.index[0]])
        for _, group in grouped
    ]
    if not rows:
        return pd.DataFrame({"Open": [], "High": [], "Low": [], "Close": []})
    # pd.concat replaces the DataFrame.append loop (removed in pandas 2.0).
    return pd.concat(rows)


def pandas_candlestick_ohlc(dat, stick = "day", otherseries = None):
    """
    :param dat: pandas DataFrame object with datetime64 index, and float columns "Open", "High", "Low", and "Close", likely created via DataReader from "yahoo"
    :param stick: A string or number indicating the period of time covered by a single candlestick. Valid string inputs include "day", "week", "month", and "year", ("day" default), and any numeric input indicates the number of trading days included in a period
    :param otherseries: An iterable that will be coerced into a list, containing the columns of dat that hold other series to be plotted as lines
    :raises ValueError: if stick is neither a recognised string nor a positive integer

    This will show a Japanese candlestick plot for stock data stored in dat, also plotting other series if passed.
    """
    mondays = WeekdayLocator(MONDAY)        # major ticks on the mondays
    alldays = DayLocator()                  # minor ticks on the days
    dayFormatter = DateFormatter('%d')      # e.g., 12

    # Create a new DataFrame which includes OHLC data for each period
    # specified by the stick input.
    transdat = dat.loc[:, ["Open", "High", "Low", "Close"]]
    if isinstance(stick, str):
        if stick == "day":
            plotdat = transdat
            stick = 1  # used for plotting (candle width multiplier)
        elif stick in ["week", "month", "year"]:
            if stick == "week":
                transdat["week"] = pd.to_datetime(transdat.index).map(
                    lambda x: x.isocalendar()[1])  # ISO week number
            elif stick == "month":
                transdat["month"] = pd.to_datetime(transdat.index).map(
                    lambda x: x.month)
            transdat["year"] = pd.to_datetime(transdat.index).map(
                lambda x: x.isocalendar()[0])  # ISO year
            # Group by year plus the sub-period column (year alone for "year").
            group_cols = ["year"] if stick == "year" else ["year", stick]
            plotdat = _ohlc_candle_rows(transdat.groupby(group_cols))
            # Approximate trading-day span, used as candle width below.
            stick = {"week": 5, "month": 30, "year": 365}[stick]
        else:
            # Original code silently fell through to a NameError here.
            raise ValueError('Valid inputs to argument "stick" include the strings "day", "week", "month", "year", or a positive integer')

    elif isinstance(stick, int) and stick >= 1:
        # Bucket consecutive trading days into fixed-size windows.
        transdat["stick"] = [np.floor(i / stick) for i in range(len(transdat.index))]
        plotdat = _ohlc_candle_rows(transdat.groupby("stick"))

    else:
        raise ValueError('Valid inputs to argument "stick" include the strings "day", "week", "month", "year", or a positive integer')

    # Set plot parameters, including the axis object ax used for plotting.
    fig, ax = plt.subplots()
    fig.subplots_adjust(bottom=0.2)
    if plotdat.index[-1] - plotdat.index[0] < pd.Timedelta('730 days'):
        # Short span (< ~2 years): weekly major / daily minor ticks.
        weekFormatter = DateFormatter('%b %d')  # e.g., Jan 12
        ax.xaxis.set_major_locator(mondays)
        ax.xaxis.set_minor_locator(alldays)
    else:
        weekFormatter = DateFormatter('%b %d, %Y')
    ax.xaxis.set_major_formatter(weekFormatter)

    ax.grid(True)

    # Create the candlestick chart.
    candlestick_ohlc(ax, list(zip(list(date2num(plotdat.index.tolist())), plotdat["Open"].tolist(), plotdat["High"].tolist(),
                      plotdat["Low"].tolist(), plotdat["Close"].tolist())),
                      colorup = "black", colordown = "red", width = stick * .4)

    # Plot other series (such as moving averages) as lines.
    if otherseries is not None:
        if not isinstance(otherseries, list):
            otherseries = [otherseries]
        dat.loc[:, otherseries].plot(ax = ax, lw = 1.3, grid = True)

    ax.xaxis_date()
    ax.autoscale_view()
    plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')

    plt.show()
# `times` rows hold (month, day, hour, minute); one row per sample.
N = times.shape[0]  # Number of data points
temp_outside = np.loadtxt('t_out.txt', dtype=float)  # outdoor temperature
temp_inside = np.loadtxt('t_in.txt', dtype=float)  # indoor temperature

# Build datetime objects (year fixed to 2014) for the x axis.
dates = np.zeros((N, 1), dtype=dt)
for i in range(N):  # range, not Python 2's xrange
    dates[i] = dt(2014, times[i, 0], times[i, 1], times[i, 2], times[i, 3])

print("Generating Plot...")
plt.plot(dates, temp_outside, 'b-', dates, temp_inside, 'r-')
plt.legend(["Outdoor", "Indoor"], loc=4)
plt.xlabel('Date Time (PST)')
plt.ylabel('Temperature (*C)')
plt.title('Apartment Indoor/Outdoor Temperatures')

formatter = DateFormatter('%m/%d %H:%M')
fig = plt.gcf()
fig.axes[0].xaxis.set_major_formatter(formatter)
# plt.show()

# Grab the current Axes without rebinding the `plt` module name
# (the original `ax = plt = plt.gca()` clobbered `plt`).
ax = plt.gca()
ax.grid(True)

fig.set_size_inches(18.5, 10.5)
fig.savefig('temperatures.png', dpi=100)
# plt.savefig('temperatures.png')
print("Finished.")

# # Generate dates
# dates = zeros(N,1);
# for i = 1:N
Ejemplo n.º 25
0
                     ha="center",
                     size=6,
                     c=line.get_color())

plt.semilogy()  # log-scale the hospitalization counts

plt.xlabel("Date")
plt.ylabel("Hospitalizations count")

from matplotlib.dates import AutoDateLocator, DateFormatter
from matplotlib.ticker import LogLocator, NullLocator, LogFormatter
from util import LogFormatterSI
import numpy

# Month/day date ticks on x; log ticks at 1, 2, 5 per decade on y with
# SI-style labels, and minor y ticks suppressed entirely.
plt.gca().xaxis.set_major_locator(AutoDateLocator())
plt.gca().xaxis.set_major_formatter(DateFormatter("%m/%d"))
#plt.gca().xaxis.set_minor_locator(AutoDateLocator())
plt.gca().yaxis.set_major_locator(LogLocator(subs=(1, 2, 5)))
plt.gca().yaxis.set_major_formatter(
    LogFormatterSI(labelOnlyBase=False,
                   minor_thresholds=(numpy.inf, numpy.inf)))
plt.gca().yaxis.set_minor_locator(NullLocator())

plt.title("New York City COVID-19 Hospitalizations Cumulative Total")

#plt.xlim(left=arrow.get("2020-03-01"))
#plt.ylim(bottom=10)

plt.legend()
plt.savefig("plots/NYC-Hospitalizations-total.png", dpi=300)
Ejemplo n.º 26
0
# Scatter + linear fit of weight vs. date; dates were pre-converted to
# ordinals so seaborn can regress on them.
ax = seaborn.regplot(data=df,
                     x='date_ordinal',
                     y='weight',
                     ci=None,
                     color='pink')

# Tighten up the axes for prettiness
ax.set_xlim(df['date_ordinal'].min() - 10, df['date_ordinal'].max() + 10)
ax.set_ylim(195, 215)

#Replace the ordinal X-axis labels with nice, readable dates
# NOTE(review): this assigns df['date_time'] values to whatever tick
# positions matplotlib chose — labels and positions may not line up; verify.
new_labels = df['date_time']
ax.set_xticklabels(new_labels)

#format the date_time variable so that it shows mo/day
myFmt = DateFormatter("%m/%d")

#plug that format into what is seen on the xaxis
ax.xaxis.set_major_formatter(myFmt)

#make tick marks be the first of each month
ax.xaxis.set_major_locator(mdates.MonthLocator(interval=1))

#create labels
plt.xlabel('Month/Day', fontweight='bold')
plt.ylabel('Pounds', fontweight='bold')
plt.title("Weight loss from March to June 2019", fontweight='bold')

#save figure
plt.savefig(
    '/Users/janestout/Dropbox/Projects/weight_loss/weight_loss_line1.png')
Ejemplo n.º 27
0
    timeline = np.arange(days[0], days[0] + len(days) + args.runtime - 1, 0.1)
    # t_new is the number of days that shall be plotted using the parameters
    #   popt retrieved from the fit.
    t_new = np.arange(0.0, len(days) + args.runtime - 1, 0.1)
    # The datapoints for the extrapolated function, n. of infected
    fitted_i = []
    # The datapoints for the extrapolated function, nr. of tests
    fitted_t = []
    for t in t_new:
        fitted_i.append(func(t, *popt_i))
        # fitted_t.append(func(t, *popt_t))

    # ======================================================================== #
    # Plot the data.

    formatter = DateFormatter('%d.%m.%Y')

    fig = plt.figure(figsize=(8.27, 11.69))
    ax1 = fig.add_subplot(311)
    ax2 = fig.add_subplot(312, sharex=ax1)
    ax3 = fig.add_subplot(313, sharex=ax1)

    ax1.plot_date(days, infected, label='Infected')
    ax1.plot(timeline,
             fitted_i,
             label=f'Fit: f(x) = {popt_i[0]:.2f} * exp({popt_i[1]:.2f} * x)')

    ax2.plot_date(days, dead, label='Dead')
    ax2.plot_date(days, recovered, label='Recovered')

    ax3.plot_date(days, tested, label='Tests')
Ejemplo n.º 28
0
# Build a DataFrame from raw exchange candles: [time(ms), open, high, low, close].
data = pd.DataFrame().from_records(candles)
data.rename({0: 'time', 1 : 'open', 2 : 'high', 3: 'low', 4: 'close'}, axis=1, inplace=True)

# Epoch-milliseconds -> datetime; price columns coerced to numeric.
data['date'] = pd.to_datetime(data['time'],unit='ms')
data['close'] = pd.to_numeric(data['close'])
data['high'] = pd.to_numeric(data['high'])
data['low'] = pd.to_numeric(data['low'])
data.set_index('date', inplace=True)
# Exchange timestamps are UTC; display in US/Eastern.
data.index = data.index.tz_localize('UTC').tz_convert('US/Eastern')
data

# Visualize data closing price history


fig, ax = plt.subplots()
myFmt = DateFormatter("%D %H %M %S")  # e.g. "06/15/21 13 30 00"
ax.plot(data['close'])
ax.plot(data['high'])
ax.plot(data['low'])
ax.set_title('BTC 15 minute closing Price History')
# plt.plot(data['close'])
ax.set_xlabel('time', fontsize = 14)
ax.set_ylabel('Price $USD', fontsize=17)
# Major tick every 240 minutes (4 hours).
ax.xaxis.set_major_locator(mdates.MinuteLocator(interval=240))
ax.xaxis.set_major_formatter(myFmt)

## Rotate date labels automatically
fig.autofmt_xdate()
fig.set_size_inches(25,10)
plt.show()
Ejemplo n.º 29
0
    def plot(
        self,
        ax=None,
        energy_index=None,
        time_format="mjd",
        flux_unit="cm-2 s-1",
        **kwargs,
    ):
        """Plot flux points.

        Parameters
        ----------
        ax : `~matplotlib.axes.Axes`, optional.
            The `~matplotlib.axes.Axes` object to be drawn on.
            If None, uses the current `~matplotlib.axes.Axes`.
        energy_index : int
            The index of the energy band to use. If set to None, use the first energy index.
            Default is None.
        time_format : {'mjd', 'iso'}, optional
            If 'iso', the x axis will contain Matplotlib dates.
            For formatting these dates see: https://matplotlib.org/gallery/ticks_and_spines/date_demo_rrule.html
        flux_unit : str, `~astropy.units.Unit`, optional
            Unit of the flux axis
        kwargs : dict
            Keyword arguments passed to :func:`matplotlib.pyplot.errorbar`

        Returns
        -------
        ax : `~matplotlib.axes.Axes`
            Axis object
        """
        import matplotlib.pyplot as plt
        from matplotlib.dates import DateFormatter

        if ax is None:
            ax = plt.gca()

        # Times/fluxes with errors, plus the upper-limit mask and UL values.
        x, xerr = self._get_times_and_errors(time_format=time_format)
        y, yerr = self._get_fluxes_and_errors(unit=flux_unit)
        is_ul, yul = self._get_flux_uls(unit=flux_unit)

        # Multi-energy light curve: reduce to one energy band.
        if len(y.shape) > 1:
            if energy_index is None:
                energy_index = 0

            y = y[:, energy_index]
            # yerr may be a [lower, upper] pair of arrays or a single array.
            if len(yerr) > 1:
                yerr = [_[:, energy_index] for _ in yerr]
            else:
                yerr = yerr[:, energy_index]
            is_ul = is_ul[:, energy_index]
            yul = yul[:, energy_index]

        # length of the ul arrow: 10% of the plotted dynamic range
        ul_arr = (np.nanmax(np.concatenate((y[~is_ul], yul[is_ul]))) -
                  np.nanmin(np.concatenate((y[~is_ul], yul[is_ul])))) * 0.1

        # join fluxes and upper limits for the plot
        y[is_ul] = yul[is_ul]
        yerr[0][is_ul] = ul_arr  # lower error bar doubles as the UL arrow

        # set plotting defaults and plot
        kwargs.setdefault("marker", "+")
        kwargs.setdefault("ls", "None")

        ax.errorbar(x=x, y=y, xerr=xerr, yerr=yerr, uplims=is_ul, **kwargs)
        ax.set_xlabel("Time ({})".format(time_format.upper()))
        ax.set_ylabel("Flux ({:FITS})".format(u.Unit(flux_unit)))
        ax.legend()
        if time_format == "iso":
            # Matplotlib-date x axis: full timestamps, rotated for room.
            ax.xaxis.set_major_formatter(DateFormatter("%Y-%m-%d %H:%M:%S"))
            plt.setp(
                ax.xaxis.get_majorticklabels(),
                rotation=30,
                ha="right",
                rotation_mode="anchor",
            )

        return ax
Ejemplo n.º 30
0
def main():
    """Load (or regenerate) submission score logs and plot upvote trajectories.

    When ``regenerate_data`` is true, parse ``InputFilename`` — tab-separated
    rows of ``seen_time, id, created_ts, ups, score`` — keeping only
    observations from the first 4 hours of each submission's life, and cache
    the result in the four pickle files.  Then reload the pickles and plot
    upvotes vs. time for every submission created after logging began.

    Raises
    ------
    ValueError
        If a line does not have exactly 5 fields, or if a submission's
        creation timestamp is inconsistent between rows.
    """
    if regenerate_data:
        created = {}
        seen_times = {}
        seen_ups = {}
        seen_scores = {}

        with open(InputFilename, 'r') as f:
            for line in f:
                parts = line.strip().split("\t")
                if len(parts) != 5:
                    # Data-format problem: fail loudly (asserts would be
                    # stripped under ``python -O``).
                    raise ValueError(f"Unexpected data here: {line}")

                # Timestamps in the log are shifted by 7 hours.
                # NOTE(review): presumably a UTC -> local conversion — confirm.
                this_time = datetime.datetime.strptime(
                    parts[0].split(".")[0],
                    "%Y-%m-%d %H:%M:%S") - datetime.timedelta(hours=7)
                this_id = parts[1]
                this_created = datetime.datetime.fromtimestamp(float(parts[2]))
                this_ups = int(parts[3])
                this_score = int(parts[4])

                if this_id not in created:
                    created[this_id] = this_created
                    seen_times[this_id] = []
                    seen_ups[this_id] = []
                    seen_scores[this_id] = []
                elif created[this_id] != this_created:
                    # A submission's creation time must never change
                    # between log rows.
                    raise ValueError(
                        f"Inconsistent creation time for {this_id}")

                # Only track each submission for its first 4 hours.
                if this_time < this_created + datetime.timedelta(hours=4):
                    seen_times[this_id].append(this_time)
                    seen_ups[this_id].append(this_ups)
                    seen_scores[this_id].append(this_score)

        with open(CreatedPickle, 'wb') as f:
            pickle.dump(created, f)
        with open(SeenTimesPickle, 'wb') as f:
            pickle.dump(seen_times, f)
        with open(SeenUpsPickle, 'wb') as f:
            pickle.dump(seen_ups, f)
        with open(SeenScoresPickle, 'wb') as f:
            pickle.dump(seen_scores, f)

    with open(CreatedPickle, 'rb') as f:
        created = pickle.load(f)
    with open(SeenTimesPickle, 'rb') as f:
        seen_times = pickle.load(f)
    with open(SeenUpsPickle, 'rb') as f:
        seen_ups = pickle.load(f)
    with open(SeenScoresPickle, 'rb') as f:
        seen_scores = pickle.load(f)

    print(f"Found {len(created)} unique submissions")

    ax = plt.subplot()

    # Only show submissions created since we started logging.
    beginning_of_log = min(
        times[0] for times in seen_times.values() if times)
    created = [sub_id for sub_id in created
               if created[sub_id] > beginning_of_log]

    # Only show submissions that reached thresh
    # thresh = 50
    # created = [x for x in created if len(seen_scores[x]) > 0 and max(seen_scores[x]) >= thresh]

    # Only show a random sample of submissions
    # proportion = 0.1
    # created = [x for x in created if random.uniform(0, 1) < proportion]

    print(f"After filtering, {len(created)} will be displayed")

    # ``sub_id`` (not ``id``) avoids shadowing the builtin.
    for sub_id in created:
        plt.plot_date(seen_times[sub_id],
                      seen_ups[sub_id],
                      xdate=True,
                      markersize=1,
                      marker=".",
                      linestyle="solid")

    # One major tick per day, labelled month/day.
    rule = rrulewrapper(DAILY, interval=1)
    ax.xaxis.set_major_locator(RRuleLocator(rule))
    ax.xaxis.set_major_formatter(DateFormatter('%m/%d'))

    ax.set_ylim([0, 500])

    plt.tight_layout()

    plt.show()