def get_ytd_price(ticker):
    US_BUSINESS_DAY = CustomBusinessDay(calendar=USFederalHolidayCalendar())
    last_day = date.today() - offsets.YearBegin() - US_BUSINESS_DAY
    last_day = last_day.strftime('%Y-%m-%d')
    url = ('https://financialmodelingprep.com/api/v3/historical-price-full/'
           + ticker + '?from=' + last_day + '&to=' + last_day)
    df = get_jsonparsed_data_price(url)
    return df['close'].to_string(index=False)
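# Illustrative sketch (not part of the example above): the offset arithmetic in
# isolation. Subtracting YearBegin() from today's date snaps back to January 1,
# and subtracting one CustomBusinessDay then lands on the last US business day
# of the previous year -- the date get_ytd_price() queries.
from datetime import date

from pandas.tseries import offsets
from pandas.tseries.holiday import USFederalHolidayCalendar
from pandas.tseries.offsets import CustomBusinessDay

us_business_day = CustomBusinessDay(calendar=USFederalHolidayCalendar())
jan_first = date.today() - offsets.YearBegin()    # e.g. 2024-06-15 -> 2024-01-01
last_trading_day = jan_first - us_business_day    # e.g. 2024-01-01 -> 2023-12-29
print(last_trading_day.strftime('%Y-%m-%d'))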
def get_calendar_days():
    with MysqlManager('wind') as session:
        try:
            year_begin = (date.today() - offsets.YearBegin()).strftime('%Y%m%d')
            year_end = (date.today() + offsets.YearEnd()).strftime('%Y%m%d')
            # AShareCalendar -- China A-share trading calendar; SSE -- Shanghai Stock Exchange
            sql_fetch_all = """SELECT TRADE_DAYS FROM AShareCalendar
                               WHERE S_INFO_EXCHMARKET = 'SSE'
                               AND TRADE_DAYS BETWEEN {0} AND {1}""".format(year_begin, year_end)
            trade_days = set(
                session.read_sql(sql_fetch_all, to_DataFrame=True)['TRADE_DAYS'].tolist())
            all_days = set(
                map(lambda dt: dt.strftime('%Y%m%d'),
                    rrule.rrule(rrule.DAILY,
                                dtstart=datetime.strptime(year_begin, '%Y%m%d'),
                                until=datetime.strptime(year_end, '%Y%m%d'))))
            no_trade_days = all_days - trade_days
            return trade_days, no_trade_days
        except Exception as e:
            print(e)
            return None, None
        finally:
            print('pas de bras pas de chocolat. No chocolate without arms.')
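# Illustrative sketch (assumed, no Wind database required): the calendar bounds
# and the all-days set used above, on their own. YearBegin/YearEnd give Jan 1
# and Dec 31 of the current year as '%Y%m%d' strings, and dateutil's rrule
# enumerates every calendar day in between.
from datetime import date, datetime

from dateutil import rrule
from pandas.tseries import offsets

year_begin = (date.today() - offsets.YearBegin()).strftime('%Y%m%d')
year_end = (date.today() + offsets.YearEnd()).strftime('%Y%m%d')
all_days = {
    dt.strftime('%Y%m%d')
    for dt in rrule.rrule(rrule.DAILY,
                          dtstart=datetime.strptime(year_begin, '%Y%m%d'),
                          until=datetime.strptime(year_end, '%Y%m%d'))
}
print(year_begin, year_end, len(all_days))  # 365 or 366 days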
def set_time_point(self, time_frequency='D'):
    geo_time_db = pd.DatetimeIndex(
        self.dset_geo_point[self.column_db_tag_time].values).unique().sort_values()
    time_start = geo_time_db[0] - offsets.YearBegin()
    time_end = geo_time_db[-1] + offsets.YearEnd()
    time_range = pd.date_range(start=time_start, end=time_end, freq=time_frequency)
    time_range = pd.DatetimeIndex(
        time_range.format(formatter=lambda x: x.strftime('%Y-%m-%d')))
    return time_range
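# Illustrative sketch (toy index, not the real dset_geo_point): the padding
# logic above widens the observed span to whole calendar years -- from
# January 1 of the earliest year to December 31 of the latest year.
import pandas as pd
from pandas.tseries import offsets

geo_time_db = pd.DatetimeIndex(['2019-05-17', '2021-02-03'])
time_start = geo_time_db[0] - offsets.YearBegin()   # 2019-01-01
time_end = geo_time_db[-1] + offsets.YearEnd()      # 2021-12-31
print(pd.date_range(start=time_start, end=time_end, freq='D'))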
def test_addsub_offset(self, ts_tz):
    # specifically non-Tick offset
    off = offsets.YearEnd(1)

    result = ts_tz + off
    assert isinstance(result, Timestamp)
    assert result._reso == ts_tz._reso
    # If ts_tz were on the last day of the year, the year would be incremented
    # by one; otherwise it is preserved.
    assert result.year == ts_tz.year
    assert result.day == 31
    assert result.month == 12
    assert tz_compare(result.tz, ts_tz.tz)

    result = ts_tz - off
    assert isinstance(result, Timestamp)
    assert result._reso == ts_tz._reso
    assert result.year == ts_tz.year - 1
    assert result.day == 31
    assert result.month == 12
    assert tz_compare(result.tz, ts_tz.tz)
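# Illustrative sketch (plain nanosecond Timestamps, not the non-nano ts_tz
# fixture used by the test): how pandas' anchored year offsets behave.
import pandas as pd
from pandas.tseries import offsets

ts = pd.Timestamp('2022-06-15', tz='US/Eastern')

print(ts + offsets.YearEnd(1))    # 2022-12-31 -> rolls forward to the year end
print(ts - offsets.YearEnd(1))    # 2021-12-31 -> previous year end
print(ts + offsets.YearBegin(1))  # 2023-01-01 -> next year start
print(ts - offsets.YearBegin(1))  # 2022-01-01 -> start of the current year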
def get_ytd(symbol: str, data_source: str = "yahoo", api_key: str = None) -> np.float64:
    """
    Retrieves the year-to-date percentage return for a particular stock

    Parameters
    ----------
    symbol: str
    data_source: str, optional
    api_key: str, optional

    Returns
    -------
    numpy.float64
        The YTD return computed from the given parameters
    """
    today = datetime.now().date()
    start_of_year = today - offsets.YearBegin()
    start_date = datetime.date(start_of_year)

    # Get stock data
    data = get_data(symbol, data_source, start_date=str(start_date),
                    end_date=str(today), api_key=api_key)
    try:
        # Retrieve the start-of-year row and the current-day row
        first_current = data.iloc[[0, -1]]
        # Percentage change between the two rows
        temp = pd.DataFrame.pct_change(first_current["Close"])
        # Return the percent change from the start of the year to the present day
        return temp[-1]
    except (IndexError, KeyError):
        pass
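# Illustrative sketch (toy prices, not a real data feed): the YTD figure above
# is simply the percentage change between the first row of the year and the
# latest row of a 'Close' column.
import pandas as pd

data = pd.DataFrame(
    {"Close": [100.0, 104.0, 112.0]},
    index=pd.to_datetime(["2024-01-02", "2024-03-01", "2024-06-14"]),
)
first_current = data.iloc[[0, -1]]
ytd_return = first_current["Close"].pct_change().iloc[-1]
print(ytd_return)  # 0.12 -> a 12% year-to-date return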
def get(self, request, employee_id):
    ids_bests = request.session.get('ids_best_perf', [])
    is_best = int(employee_id) in ids_bests

    employee = get_object_or_404(Employee, id=employee_id)
    today = datetime.date.today().strftime('%Y-%m-%d')

    # Territories part
    territories = Territory.objects.filter(
        clerks=Employee.objects.get(id=employee_id))

    # TimeExp part
    regular_amount_vac = 26
    vacations_to_be_taken = 0
    first_day_of_this_year = (datetime.date.today() - offsets.YearBegin()).strftime('%Y-%m-%d')
    last_day_of_this_year = (datetime.date.today() + offsets.YearEnd()).strftime('%Y-%m-%d')
    vacations_already_booked = len(
        TimeExp.objects.filter(clerk=employee_id, type=2,
                               date__gte=first_day_of_this_year).filter(
                                   date__lte=last_day_of_this_year))
    vacations_to_be_taken = regular_amount_vac - vacations_already_booked
    all_vacations_to_happen = TimeExp.objects.filter(clerk=employee_id, type=2, date__gte=today)
    all_trainings_to_happen = TimeExp.objects.filter(clerk=employee_id, type=3, date__gte=today)

    # Productivity part
    clerk_inbound_call = []
    clerk_outbound_call = []
    clerks_outbound_mail = []
    clerks_inbound_mail = []
    all_clerk_productivity = Productivity.objects.filter(clerk=employee_id)
    all_dates_for_productivity = [result.date for result in all_clerk_productivity]
    desired_dates = []
    for date in all_dates_for_productivity:
        if date not in desired_dates:
            desired_dates.append(date)
    desired_dates.sort()
    desired_dates = desired_dates[-15:]
    for date in desired_dates:
        result_inbound_call = Productivity.objects.get(date=date, clerk=employee_id, type=1)
        clerk_inbound_call.append(int(result_inbound_call.number))
        result_outbound_call = Productivity.objects.get(date=date, clerk=employee_id, type=2)
        clerk_outbound_call.append(int(result_outbound_call.number))
        result_inbound_mail = Productivity.objects.get(date=date, clerk=employee_id, type=3)
        clerks_inbound_mail.append(int(result_inbound_mail.number))
        result_outbound_mail = Productivity.objects.get(date=date, clerk=employee_id, type=4)
        clerks_outbound_mail.append(int(result_outbound_mail.number))

    trace1 = go.Bar(x=desired_dates, y=clerk_inbound_call, name='Inbound Calls')
    trace2 = go.Bar(x=desired_dates, y=clerk_outbound_call, name='Outbound Calls')
    trace3 = go.Bar(x=desired_dates, y=clerks_inbound_mail, name='Inbound Mails')
    trace4 = go.Bar(x=desired_dates, y=clerks_outbound_mail, name='Outbound Mails')
    clerks_data = [trace1, trace2, trace3, trace4]
    layout = go.Layout(barmode='group', title='Productivity by Contact Type')
    fig = go.Figure(data=clerks_data, layout=layout)
    clerk_div = opy.plot(fig, auto_open=False, output_type='div')

    # Productivity tendencies
    engine = create_engine('postgresql+psycopg2://postgres:coderslab@localhost/coll_db')
    prod_clerk_df = pd.read_sql_query(
        'SELECT "date", "number", "type" FROM mng_dashboard_productivity WHERE clerk_id={}'
        .format(employee_id),
        con=engine)
    av_clerks_in_call = prod_clerk_df[prod_clerk_df.type == 1].number.mean()
    av_clerks_out_call = prod_clerk_df[prod_clerk_df.type == 2].number.mean()
    # av_clerks_in_mail = prod_clerk_df[prod_clerk_df.type == 3].number.mean()
    # av_clerks_out_mail = prod_clerk_df[prod_clerk_df.type == 4].number.mean()

    ctx = {
        "employee": employee,
        "active_employees": Employee.active_employees(),
        "all_vacations_to_happen": all_vacations_to_happen,
        "all_trainings_to_happen": all_trainings_to_happen,
        "vacations_to_be_taken": vacations_to_be_taken,
        "territories": territories,
        "all_territories": Territory.objects.all(),
        "clerk_prod_graph": clerk_div,
        "is_best": is_best,
        "av_in_calls": round(av_clerks_in_call, 2),
        "av_out_calls": round(av_clerks_out_call, 2),
    }
    return render(request, "employee_details.html", ctx)
def global_yields(countries=[
        'U.S.', 'Germany', 'U.K.', 'Italy', 'France', 'Canada', 'China',
        'Australia', 'Japan', 'India', 'Russia', 'Brazil', 'Philippines', 'Thailand']):
    """Return a styled table of 2Y, 5Y and 10Y sovereign yields with
    1-day, 1-week, YTD and 1-year changes for the given countries."""
    tdy = str(date.today().day) + '/' + str(date.today().month) + '/' + str(date.today().year)
    oneyr = str(date.today().day) + '/' + str(date.today().month) + '/' + str(date.today().year - 1)

    tens = pd.DataFrame(index=pd.bdate_range(start=oneyr, end=date.today()))
    tens.index.name = 'Date'
    fives = pd.DataFrame(index=pd.bdate_range(start=oneyr, end=date.today()))
    fives.index.name = 'Date'
    twos = pd.DataFrame(index=pd.bdate_range(start=oneyr, end=date.today()))
    twos.index.name = 'Date'

    cntry = countries
    for i in range(len(cntry)):
        tens = tens.merge(ytm(cntry[i], '10Y'), on='Date')
    for i in range(len(cntry)):
        fives = fives.merge(ytm(cntry[i], '5Y'), on='Date')
    for i in range(len(cntry)):
        twos = twos.merge(ytm(cntry[i], '2Y'), on='Date')

    ytd = date.today() - offsets.YearBegin()

    # 10 Year
    teny = pd.DataFrame(data=(tens.iloc[-1, :],
                              tens.diff(1).iloc[-1, :] * 100,
                              tens.diff(1).iloc[-5, :] * 100,
                              (tens.iloc[-1, :] - tens[ytd:].iloc[0, :]) * 100,
                              (tens.iloc[-1, :] - tens.iloc[0, :]) * 100))
    teny = teny.T
    cols = [('10Y', 'Yield'), ('10Y', '1 Day'), ('10Y', '1 Week'), ('10Y', 'YTD'), ('10Y', '1 Year')]
    teny.columns = pd.MultiIndex.from_tuples(cols)
    teny.index.name = 'Countries'

    # 5 Year
    fivey = pd.DataFrame(data=(fives.iloc[-1, :],
                               fives.diff(1).iloc[-1, :] * 100,
                               fives.diff(1).iloc[-6, :] * 100,
                               (fives.iloc[-1, :] - fives[ytd:].iloc[0, :]) * 100,
                               (fives.iloc[-1, :] - fives.iloc[0, :]) * 100))
    fivey = fivey.T
    cols = [('5Y', 'Yield'), ('5Y', '1 Day'), ('5Y', '1 Week'), ('5Y', 'YTD'), ('5Y', '1 Year')]
    fivey.columns = pd.MultiIndex.from_tuples(cols)
    fivey.index.name = 'Countries'

    # 2 Year
    twoy = pd.DataFrame(data=(twos.iloc[-1, :],
                              twos.diff(1).iloc[-1, :] * 100,
                              twos.diff(1).iloc[-6, :] * 100,
                              (twos.iloc[-1, :] - twos[ytd:].iloc[0, :]) * 100,
                              (twos.iloc[-1, :] - twos.iloc[0, :]) * 100))
    twoy = twoy.T
    cols = [('2Y', 'Yield'), ('2Y', '1 Day'), ('2Y', '1 Week'), ('2Y', 'YTD'), ('2Y', '1 Year')]
    twoy.columns = pd.MultiIndex.from_tuples(cols)
    twoy.index.name = 'Countries'

    yields = twoy.merge(fivey, on='Countries').merge(teny, on='Countries')
    data = (yields.style
            .format('{0:,.3f}%', subset=[('2Y', 'Yield'), ('5Y', 'Yield'), ('10Y', 'Yield')])
            .background_gradient(
                cmap='RdYlGn_r',
                subset=list(yields.columns.drop(('2Y', 'Yield'))
                                          .drop(('5Y', 'Yield'))
                                          .drop(('10Y', 'Yield'))))
            .set_precision(2))
    return data
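# Illustrative sketch (synthetic yields, no Investing.com call): the YTD change
# above relies on slicing a business-day DatetimeIndex with the Jan-1 Timestamp
# produced by YearBegin, so `frame[ytd:].iloc[0, :]` is the first print of the
# current year.
import pandas as pd
from pandas.tseries import offsets

idx = pd.bdate_range('2023-07-03', '2024-02-29')
frame = pd.DataFrame({'US 10Y': range(len(idx))}, index=idx, dtype=float)

ytd = frame.index[-1] - offsets.YearBegin()          # 2024-01-01 for this sample
ytd_change = (frame.iloc[-1, :] - frame[ytd:].iloc[0, :]) * 100
print(ytd_change)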
from IPython.core.display import display, HTML
from bs4 import BeautifulSoup
import csv
from yahooquery import Ticker
from plotly.subplots import make_subplots

st.write("""
# Cross Asset Market Analytics
""")

from pandas.tseries import offsets

one_m = date.today() - datetime.timedelta(30)
three_m = date.today() - datetime.timedelta(90)
six_m = date.today() - datetime.timedelta(120)
one_yr = date.today() - datetime.timedelta(370)
ytd = date.today() - offsets.YearBegin()
year = date.today().year
yest = date.today() - datetime.timedelta(1)

now = datetime.datetime.now()
now = now.strftime("%b %d, %Y %H:%M")

tdy = str(date.today().day) + '/' + str(date.today().month) + '/' + str(date.today().year)
oneyr = str(date.today().day) + '/' + str(date.today().month) + '/' + str(date.today().year - 1)

components.iframe("https://harshshivlani.github.io/x-asset/liveticker")


# Define function to fetch historical data from Investing.com
def hist_data(name, country):
max_slackplanes = 10  # not found from data, just guessed

# Adds depot_length to start_date
Data['Depot_End_Date'] = 0
for i in range(len(Data)):
    Data.iloc[i, 8] = Data.iloc[i, 3] + timedelta(days=Data.iloc[0, 4].item())

# Depot_Length_months
Data['Depot_Length_months'] = (Data['Depot_Length'] / 30).round(0)  # create depot_length in months

''' CREATE DEPOT DATA '''
# Depot_Data = numpy.zeros(shape=(max_planes, max_months))
start_date = Data.iloc[0, 3] - offsets.YearBegin()  # start at beginning of year
start_year = start_date.year
start_month = start_date.month

# convert length and end from float to int for addition
Data['Depot_Length_months'] = Data['Depot_Length_months'].astype(numpy.int64)

# Month_start is month + year - 1 (python indices start at 0)
Data['SSD_Month_start'] = (Data['year'] - start_year) * 12 + (Data['month'] - start_month)
Data['SSD_Month_start'] = Data['SSD_Month_start'].astype(numpy.int64)
Data['SSD_Month_end'] = Data['SSD_Month_start'] + Data['Depot_Length_months']  # month start + depot length (varies)

# Create Depot_Data (new value for every month in depot)
Depot_Data = Data[['Tail_Number', 'SSD_Month_start', 'SSD_Month_end']]
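# Quick worked example of the month-index formula above (dates are made up):
# with start_date = 2018-01-01 (start_year=2018, start_month=1), a depot event
# beginning in March 2019 maps to month index (2019 - 2018) * 12 + (3 - 1) = 14.
start_year, start_month = 2018, 1
year, month = 2019, 3
print((year - start_year) * 12 + (month - start_month))  # 14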
def get_since_until(
    time_range: Optional[str] = None,
    since: Optional[str] = None,
    until: Optional[str] = None,
    time_shift: Optional[str] = None,
    relative_start: Optional[str] = None,
    relative_end: Optional[str] = None,
) -> Tuple[datetime, datetime]:
    """Return `since` and `until` date time tuple from string representations of
    time_range, since, until and time_shift.

    This function supports both reading the keys separately (from `since` and
    `until`), as well as the new `time_range` key. Valid formats are:

        - ISO 8601
        - X days/years/hours/day/year/weeks
        - X days/years/hours/day/year/weeks ago
        - X days/years/hours/day/year/weeks from now
        - freeform

    Additionally, for `time_range` (these specify both `since` and `until`):

        - Last day
        - Last week
        - Last month
        - Last quarter
        - Last year
        - This week
        - This month
        - This year
        - No filter
        - Last X seconds/minutes/hours/days/weeks/months/years
        - Next X seconds/minutes/hours/days/weeks/months/years

    """
    separator = " : "
    relative_start = parse_human_datetime(relative_start if relative_start else "today")
    relative_end = parse_human_datetime(relative_end if relative_end else "today")
    common_time_frames = {
        "Last day": (
            relative_start - relativedelta(days=1),  # type: ignore
            relative_end,
        ),
        "Last week": (
            relative_start - relativedelta(weeks=1),  # type: ignore
            relative_end,
        ),
        "Last month": (
            relative_start - relativedelta(months=1),  # type: ignore
            relative_end,
        ),
        "Last quarter": (
            relative_start - relativedelta(months=3),  # type: ignore
            relative_end,
        ),
        "Last year": (
            relative_start - relativedelta(years=1),  # type: ignore
            relative_end,
        ),
        "This year": (
            date.today() - offsets.YearBegin(),  # type: ignore
            date.today() + offsets.YearEnd(),
        ),
        "This month": (
            relative_start - relativedelta(days=relative_start.day - 1),  # type: ignore
            relative_end,
        ),
        "This week": (
            relative_start - relativedelta(days=relative_start.weekday()),  # type: ignore
            relative_end,
        ),
    }

    if time_range:
        if separator in time_range:
            since, until = time_range.split(separator, 1)
            if since and since not in common_time_frames:
                since = add_ago_to_since(since)
            since = parse_human_datetime(since)
            until = parse_human_datetime(until)
        elif time_range in common_time_frames:
            since, until = common_time_frames[time_range]
        elif time_range == "No filter":
            since = until = None
        else:
            rel, num, grain = time_range.split()
            if rel == "Last":
                since = relative_start - relativedelta(  # type: ignore
                    **{grain: int(num)}
                )
                until = relative_end
            else:  # rel == 'Next'
                since = relative_start
                until = relative_end + relativedelta(  # type: ignore
                    **{grain: int(num)}
                )
    else:
        since = since or ""
        if since:
            since = add_ago_to_since(since)
        since = parse_human_datetime(since)
        until = parse_human_datetime(until) if until else relative_end

    if time_shift:
        time_delta = parse_past_timedelta(time_shift)
        since = since if since is None else (since - time_delta)  # type: ignore
        until = until if until is None else (until - time_delta)  # type: ignore

    if since and until and since > until:
        raise ValueError(_("From date cannot be larger than to date"))

    return since, until  # type: ignore
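# Illustrative sketch (standalone, without the parse_human_datetime helpers used
# above): the "This year" window in isolation. Subtracting YearBegin snaps today
# back to January 1 and adding YearEnd snaps it forward to December 31, both as
# pandas Timestamps. Edge cases: on January 1 itself YearBegin rolls back a full
# year, and on December 31 YearEnd rolls forward to the next year's end.
from datetime import date

from pandas.tseries import offsets

since = date.today() - offsets.YearBegin()
until = date.today() + offsets.YearEnd()
print(since, until)  # e.g. 2024-01-01 00:00:00  2024-12-31 00:00:00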