def apply_term_extensions(item, revrec_schedule, term_extensions):
    """Adjusts the specified revrec schedule for term extensions.

    :param item: InvoiceItem object.
    :param revrec_schedule: Dictionary of debits and credits by day
    :param term_extensions: Term Extension objects.
    """
    for ext in term_extensions:
        defrev = revrec_schedule[day_before(ext.grant_date)]['ending_defrev']
        start = ext.grant_date
        end = ext.service_end
        daily_amort = defrev / days_elapsed(start, end)
        for date in daterange(start, end):
            defrev = defrev - daily_amort
            template = dict(revrec_daily_template)
            template['cr_rev'] = daily_amort
            template['dr_defrev'] = daily_amort
            template['ending_defrev'] = defrev
            try:
                revrec_schedule[date]['cr_rev'] = daily_amort
                revrec_schedule[date]['ending_defrev'] = defrev
            except KeyError:
                revrec_schedule[date] = template
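# --- Hypothetical date helpers ----------------------------------------------
# The revrec functions in this file lean on daterange(), day_before(), and
# days_elapsed(), which are not shown here. A minimal sketch of plausible
# implementations follows, assuming both the range and the day count are
# inclusive of start and end (that is what lets amount / days_elapsed(start, end)
# amortize to roughly zero by the last day yielded by daterange(start, end)).
# The real helpers may differ.
from datetime import timedelta


def daterange(start, end):
    """Yield every date from start through end, inclusive (assumed)."""
    for offset in range((end - start).days + 1):
        yield start + timedelta(days=offset)


def day_before(d):
    """Return the calendar day immediately before d."""
    return d - timedelta(days=1)


def days_elapsed(start, end):
    """Number of days in the inclusive range [start, end] (assumed)."""
    return (end - start).days + 1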
def get_weather_data(apikey, locs, cols, start_date, end_date, offset):
    """Retrieves daily historical weather data for the specified locations
    using the Dark Sky API. Output is saved as a CSV in the 'data' folder.

    Args:
        apikey (str): Dark Sky API key.
        locs (str): Geocoded locations file name (with extension).
        cols (str): File name containing custom column names.
        start_date (datetime.datetime): Start date for historical data range.
        end_date (datetime.datetime): End date for historical data range.
        offset (int): Step size for iterator (number of days).
    """
    locs_path = get_datafile(locs)
    locs = pd.read_csv(locs_path)

    # get columns list
    columns = get_datafile(cols)
    with open(columns) as f:
        cols = [line.strip() for line in f]

    # extract data for each location for date range b/w start and end date
    tbl = []
    for index, row in locs.iterrows():
        for single_date in daterange(start_date, end_date, offset):
            forecast = forecastio.load_forecast(apikey, row['Lat'], row['Lng'],
                                                time=single_date, units='si')
            h = forecast.daily()
            tz = forecast.json['timezone']
            d = h.data
            for p in d:
                # get date info
                utc = p.d['time']
                dts = dt.datetime.utcfromtimestamp(utc)
                isodate = dts.isoformat()
                date_info = [tz, isodate, dts.year, dts.month, dts.day]

                # get location info
                loc, lat, lng = row['Location'], row['Lat'], row['Lng']
                elevation = row['Elevation']
                loc_info = [loc, lat, lng, elevation]

                # get weather attributes - some may be missing, so use .get()
                # and only scale humidity when it is actually present
                temp_high = p.d.get('temperatureHigh', None)
                temp_low = p.d.get('temperatureLow', None)
                humidity = p.d.get('humidity', None)
                if humidity is not None:
                    humidity = humidity * 100
                pressure = p.d.get('pressure', None)
                attr_info = [temp_high, temp_low, humidity, pressure]

                tbl.append(loc_info + date_info + attr_info)

    # convert output to data frame
    df = pd.DataFrame(tbl)
    df.columns = cols
    filename = 'historical_data.csv'
    save_data(df, filename, sep='|')
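# --- Hypothetical stepped daterange ------------------------------------------
# get_weather_data() calls daterange(start_date, end_date, offset) with a third
# argument, i.e. it samples every `offset` days rather than every day. A sketch
# of what that stepped variant of the helper sketched earlier might look like;
# the step handling and endpoint inclusivity are assumptions.
import datetime as dt


def daterange(start, end, step=1):
    """Yield dates from start through end, `step` days apart (assumed)."""
    current = start
    while current <= end:
        yield current
        current += dt.timedelta(days=step)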
def get_prayer_record_range(self, date, max_date=None):
    '''Get the prayer records of all days after the specified date and up to
    the maximum date (defaults to today).'''
    # Resolve the default at call time; a ``max_date=date.today()`` default in
    # the signature would be evaluated only once, when the function is defined.
    # (.today() is a classmethod, so calling it via the passed-in date works.)
    if max_date is None:
        max_date = date.today()

    # Loop through the date range and ensure that a record exists for every date
    for dt in daterange(date, max_date + timedelta(1)):
        self.create_record(dt)

    cursor = self.db.cursor()
    cursor.execute(
        "SELECT fajr, dhuhr, asr, maghrib, isha FROM record "
        "WHERE date >= ? AND date <= ?", (date, max_date))
    return cursor.fetchall()
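# --- Default-argument note ----------------------------------------------------
# The signature above originally read ``max_date=date.today()``. Default
# expressions are evaluated once, when ``def`` runs, so a long-running process
# would keep using the date the module was loaded; deferring the default to
# call time avoids that. This is standard Python behavior; a small
# self-contained illustration:
from datetime import date


def frozen_default(max_date=date.today()):
    # Evaluated once, at definition time.
    return max_date


def call_time_default(max_date=None):
    # Evaluated on every call.
    return max_date if max_date is not None else date.today()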
def amortize_service_fee(item, payment_date):
    """
    Creates a daily revenue recognition schedule for the specified invoice item
    based on daily amortization of deferred revenue to revenue.

    :param item: InvoiceItem object.
    :param payment_date: Date of payment.
    :return dict. Daily revrec schedule of debit and credit journal entries.
    """
    # Daily revenue recognition schedule
    revrec_schedule = {}

    # The amortization period begins on the day of the payment and continues
    # until the end of the invoice item's service term
    revrec_start = max(payment_date, item.service_start)
    revrec_end = item.service_end

    # Fee amount
    amount = item.total_amount

    # Deferred revenue does not exist until payment is made
    defrev = 0
    daily_amort = 0
    cumul_rev = 0

    # Generating revenue recognition schedule. Iterate over service period by day.
    for date in daterange(item.service_start, item.service_end):

        # On payment date, set the deferred revenue balance and daily revenue amortization
        if date == payment_date:
            defrev = amount
            daily_amort = amount / days_elapsed(revrec_start, revrec_end)

        # From payment date and onwards, amortize deferred revenue to revenue
        if date >= payment_date:
            defrev = defrev - daily_amort
            cumul_rev += daily_amort

        values = {
            'cr_rev': daily_amort,
            'dr_defrev': daily_amort,
            'ending_defrev': defrev,
            'cumul_rev': cumul_rev,
        }
        revrec_schedule[date] = create_schedule(values)

    return revrec_schedule
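# --- Worked check of the amortization arithmetic ------------------------------
# amortize_service_fee() spreads the fee evenly over the days from the payment
# date through the service end. A self-contained check of that arithmetic with
# the date handling inlined and made-up numbers; it mirrors the core loop but
# does not call the function above.
from datetime import date, timedelta

amount = 120.0
service_end = date(2021, 1, 31)
payment_date = date(2021, 1, 7)                  # paid six days into the term

days = (service_end - payment_date).days + 1     # inclusive day count: 25
daily_amort = amount / days

defrev, cumul_rev = amount, 0.0
d = payment_date
while d <= service_end:
    defrev -= daily_amort
    cumul_rev += daily_amort
    d += timedelta(days=1)

# Deferred revenue is fully amortized and cumulative revenue equals the fee.
assert abs(defrev) < 1e-9 and abs(cumul_rev - amount) < 1e-9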
def amortize_amount(revrec_schedule, amt, start_date, end_date):
    """
    Modifies an existing revrec_schedule by amortizing the amount over the
    specified period. Existing revenue and deferred revenue values will be
    overwritten for the period.

    :param revrec_schedule: Dictionary of debits and credits by day
    :param amt: Amount of deferred revenue to be amortized.
    :param start_date: Start date of the amortization period.
    :param end_date: End date of the amortization period.
    """
    daily_amort = amt / days_elapsed(start_date, end_date)
    defrev = amt
    for date in daterange(start_date, end_date):
        defrev = defrev - daily_amort
        revrec_schedule[date]['cr_rev'] = daily_amort
        revrec_schedule[date]['ending_defrev'] = defrev
        revrec_schedule[date]['dr_defrev'] = daily_amort
def apply_grace_period(revrec_schedule, item, payment_date):
    """Adjusts the specified revrec schedule for late payment, which requires
    journal entries related to grace period.

    :param revrec_schedule: Dictionary of debits and credits by day to be adjusted
        due to application of grace period.
    :param item: InvoiceItem object
    :param payment_date: Date of payment
    :return list. Supporting notes on grace period calculations. Displayed in UI output.
    """
    gp_notes = []
    service_start = item.service_start
    service_end = item.service_end

    # Identify the details of the prev paid service period.
    shift = {'Monthly': 30, 'Yearly': 365, 'Biyearly': 730}
    prev_service_end = day_before(service_start)
    prev_service_start = prev_service_end - timedelta(shift[item.billperiod] - 1)
    prev_service_term = days_elapsed(prev_service_start, prev_service_end)
    prev_amount = item.total_amount
    prev_amort = prev_amount / prev_service_term

    # Financial reporting days. Important for determining debit and credit entries.
    current_reporting_day = datetime(service_start.year, service_start.month,
                                     last_day_of_month(service_start.year, service_start.month))
    prev_reporting_day = day_before(datetime(prev_service_end.year, prev_service_end.month, 1))

    # Notes for display in the UI output.
    gp_notes.append('JOURNAL ENTRIES FOR GRACE PERIOD')
    gp_notes.append('---' * 60)
    gp_notes.append('payment date: %s, current term: %s -> %s, prev term: %s -> %s' % (
        pretty_date(payment_date), pretty_date(service_start), pretty_date(service_end),
        pretty_date(prev_service_start), pretty_date(prev_service_end)))
    gp_notes.append('current reporting day: %s, prev reporting day: %s, days reserved for: %s -> %s' % (
        pretty_date(current_reporting_day), pretty_date(prev_reporting_day),
        prev_service_start, prev_reporting_day))

    # For each date that payment is late...
    running_total_dr_reserve = 0
    for date in daterange(service_start, day_before(payment_date)):

        # The previous service term is extended by the grace period used
        revised_service_term = prev_service_term + days_elapsed(service_start, date)

        # Revenue amortization for the extended service term
        revised_amort = prev_amount / revised_service_term

        # Difference in the daily amortization for the previous service term
        amort_difference = prev_amort - revised_amort

        # The difference b/n revenue that should have been recognized vs. what was recognized in the
        # period prior to the previous reporting day. This amount has already been reserved for, so
        # the journal entry is a debit against the reserve for grace periods.
        revised_dr_reserve_graceperiod = days_elapsed(prev_service_start, prev_reporting_day) * amort_difference

        # We have been debiting the reserve for each day late, this day's debit amount is equal to the
        # incremental debit against the reserve. The other side of the entry is credit contra-revenue.
        dr_reserve_graceperiod = revised_dr_reserve_graceperiod - running_total_dr_reserve
        cr_contra_rev = dr_reserve_graceperiod

        revrec_schedule[date]['dr_reserve_graceperiod'] = dr_reserve_graceperiod
        revrec_schedule[date]['cr_contra_rev'] = cr_contra_rev

        gp_notes.append('%s, PrevTerm: %s->%s, PrevAmort: $%s->$%s, Value: %s, DaysReservedFor: %s, '
                        'PrevTotalReserve %s, TotalReserve: %s, DR reserve: %s' % (
                            pretty_date(date), prev_service_term, revised_service_term,
                            round(prev_amort, 3), round(revised_amort, 3),
                            revised_service_term * revised_amort,
                            (prev_reporting_day - prev_service_start).days + 1,
                            round(running_total_dr_reserve, 3),
                            round(revised_dr_reserve_graceperiod, 3),
                            round(dr_reserve_graceperiod, 3)))

        running_total_dr_reserve += dr_reserve_graceperiod

    gp_notes.append('---' * 60)
    return gp_notes
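# --- Worked example of the grace-period reserve math ---------------------------
# For each day a payment is late, apply_grace_period() treats the previous paid
# term as one day longer, which lowers its daily amortization; the revenue
# difference over the portion of the previous term that fell before the previous
# reporting day has already been reserved for, so only the incremental piece is
# debited from the reserve each day. A self-contained illustration with made-up
# numbers that mirrors the formulas in the loop above (it does not call the
# function itself).
prev_amount = 365.0
prev_service_term = 365                 # days in the previous paid term
days_already_reported = 334             # prev_service_start -> prev_reporting_day
prev_amort = prev_amount / prev_service_term           # 1.00 per day

rows = []
running_total_dr_reserve = 0.0
for days_late in range(1, 4):           # first three days of the grace period
    revised_service_term = prev_service_term + days_late
    revised_amort = prev_amount / revised_service_term
    amort_difference = prev_amort - revised_amort
    revised_dr_reserve = days_already_reported * amort_difference
    dr_reserve_today = revised_dr_reserve - running_total_dr_reserve
    running_total_dr_reserve += dr_reserve_today
    rows.append((days_late, round(revised_amort, 4), round(dr_reserve_today, 4)))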
def apply_refunds(revrec_schedule, invoice_amount, item, refunds):
    """Adjusts the specified revrec schedule for refunds.

    :param revrec_schedule: Dictionary of debits and credits by day
    :param invoice_amount: Total amount of the invoice
    :param item: The Item object
    :param refunds: Refund objects
    """
    # Adjustment amortization due to refunds
    for ref in refunds:
        proportion = item.total_amount / invoice_amount
        refund_applied = ref.refund_amount * proportion
        last_day_of_schedule = max(revrec_schedule.keys())

        # Deferred revenue and total revenue recognized as of beginning of refund date
        stats_as_of_refund_date = {
            'ending_defrev': revrec_schedule[day_before(ref.refund_date)]['ending_defrev'],
            'cr_rev': sum([row['cr_rev'] for date, row in revrec_schedule.iteritems()
                           if date < ref.refund_date])
        }

        # positive_item_amount: is the invoice item a charge vs. discount/pro-rated credit?
        # service_cancelled: is refund related to service being cancelled?
        flags = {
            'positive_item_amount': item.total_amount >= 0,
            'service_cancelled': ref.cancel_flag
        }

        # Calculate debits and credits associated with refund event
        results = refund_calc(flags=flags,
                              revrec_start_date=min(revrec_schedule.keys()),
                              refund_date=ref.refund_date,
                              refund_amount=refund_applied,
                              stats_as_of_refund_date=stats_as_of_refund_date)

        # Remaining deferred revenue
        remaining_defrev = stats_as_of_refund_date['ending_defrev'] - results['dr_defrev']

        # Copy over results of refund calculation to schedule; debits/credits are effective the refund day
        for (key, value) in results.iteritems():
            revrec_schedule[ref.refund_date][key] = value

        # If service term is cancelled, any remaining deferred revenue is recognized on the day of the refund
        if ref.cancel_flag:
            revrec_schedule[ref.refund_date]['cr_rev'] = remaining_defrev
            revrec_schedule[ref.refund_date]['cumul_rev'] = \
                revrec_schedule[ref.refund_date - timedelta(1)]['cumul_rev'] + remaining_defrev
            revrec_schedule[ref.refund_date]['ending_defrev'] = 0

            # Now that DR is zeroed out, there is no more amortization
            # print 'Clearing schedule from %s to %s' % (ref.refund_date + timedelta(1), last_day_of_schedule)
            for date in daterange(ref.refund_date + timedelta(1), last_day_of_schedule):
                revrec_schedule[date]['cr_rev'] = 0
                revrec_schedule[date]['ending_defrev'] = 0
                revrec_schedule[date]['cumul_rev'] = 0
                revrec_schedule[date]['dr_defrev'] = 0

        # If service term not cancelled, remaining deferred revenue is amortized through end of service term
        else:
            amortize_amount(revrec_schedule=revrec_schedule,
                            amt=remaining_defrev,
                            start_date=ref.refund_date,
                            end_date=last_day_of_schedule)
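# --- Proportional refund allocation --------------------------------------------
# apply_refunds() attributes each refund to the invoice item in proportion to the
# item's share of the total invoice; a cancellation then recognizes the remaining
# deferred revenue on the refund date, otherwise the remainder is re-amortized
# over the rest of the term via amortize_amount(). The allocation step on its
# own, with made-up figures:
invoice_amount = 200.0
item_amount = 150.0                     # this item is 75% of the invoice
refund_amount = 40.0

proportion = item_amount / invoice_amount
refund_applied = refund_amount * proportion
assert refund_applied == 30.0           # 75% of the 40.00 refund hits this item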
"gis") + 1 # setup unique IDs df_dim_vals = dfh.build_dimension_values_df( pid_meta, df_dims, next_dim_val_id) dim_val_result = db.insert_dataframe_rows(df_dim_vals, "DimensionValues", "gis") h.delete_var_and_release_mem([df_dims, df_dim_vals]) ########################################################### # APPEND - runs whether inserting or updating a table merged_prod_dict = jh.get_merged_tables_from_json( products_to_merge_json) # find info about any merged tables products_to_update = [] # create list of products to be updated if start_date and end_date: # update products for specified date range - this section only executes if --start and --end args are present for dt in h.daterange(start_date, end_date): changed_cubes = wds.get_changed_cube_list( dt.strftime("%Y-%m-%d")) # find out which cubes have changed prod_list = db.get_matching_product_list( changed_cubes) # find out which of these cubes exist in the db logger.info( str(len(prod_list)) + " table(s) found for " + dt.strftime("%Y-%m-%d") + ": " + str(prod_list)) products_to_update.extend(prod_list) # if any product in the changed cube list is part of a merged product, remove the prodid from the list to be # processed and notify the user that they will have to update that product separately check_pids = products_to_update.copy() for check_pid in check_pids: if jh.is_master_in_merged_product(check_pid, merged_prod_dict): logger.warning(
def number_of_articles(project, startdate, enddate):
    """Return a mapping of ISO date -> article count for the given project.

    Currently returns a fixed value of 100 for every day in the range.
    """
    res = {}
    for date in daterange(date_from_iso(startdate), date_from_iso(enddate)):
        res[date_to_iso(date)] = 100
    return res
def calculate_user_values(self, username):
    memcache_data_key = '!data!{}'.format(username)
    values = json.loads(memcache.get(memcache_data_key) or '{}')
    if values:
        return values

    try:
        github_user = User.get(username)
    except pyresto.Error:
        self.response.set_status(404)  # not 100% sure but good enough
        self.render('errors/404')
        return
    except Exception as err:
        self.response.set_status(500)
        logging.error(err)
        return

    languages = User.sort_languages(github_user.language_stats)
    fork_count = sum((1 for repo in github_user.repos if repo.fork))
    today = datetime.datetime.today()
    recent_than = today - datetime.timedelta(days=RECENT_DAYS)
    own_commits = github_user.get_latest_commits(recent_than)

    commits_by_repo = reduce(self.reduce_commits_by_repo, own_commits, dict())
    if commits_by_repo:
        last_project = max(commits_by_repo, key=commits_by_repo.get)
    else:
        last_project = ''
    logging.info(commits_by_repo)

    if last_project:
        last_project_url = [repo.html_url for repo in github_user.repos
                            if repo.name == last_project][0]
    else:
        last_project_url = None

    commits_by_date = reduce(self.reduce_commits_by_date, own_commits, dict())
    range = daterange(recent_than, today)
    for d in range:
        key = unicode(d.date())
        if key not in commits_by_date:
            commits_by_date[key] = 0
    commit_data = [commits_by_date[d] for d in sorted(commits_by_date)]
    max_commits = max(commit_data)
    logging.debug('Commit data %s', str(commit_data))

    commit_sparkline = 'data:image/png;base64,' + \
        base64.b64encode(
            sparklines.impulse(commit_data,
                               below_color='SlateGray',
                               width=3,
                               dmin=0,
                               dmax=max(commit_data)),
        )

    values = {'user': github_user.__dict__,
              'own_repos': github_user.public_repos - fork_count,
              'fork_repos': fork_count,
              'languages': languages,
              'project_followers': github_user.project_followers -
                                   github_user.public_repos,
              'commit_sparkline': commit_sparkline,
              'max_commits': max_commits,
              'last_project': last_project,
              'last_project_url': last_project_url,
              'days': RECENT_DAYS
              }

    if not memcache.set(memcache_data_key, json.dumps(values),
                        MEMCACHE_EXPIRATION):
        logging.error('Memcache set failed for user data %s', username)

    return values
def calculate_user_values(self, username):
    memcache_data_key = '!data!{}'.format(username)
    values = json.loads(memcache.get(memcache_data_key) or '{}')
    if values:
        return values

    try:
        github_user = User.get(username)
    except pyresto.Error:
        self.response.set_status(404)  # not 100% sure but good enough
        self.render('errors/404')
        return
    except Exception as err:
        self.response.set_status(500)
        logging.error(err)
        return

    languages = User.sort_languages(github_user.language_stats)
    fork_count = sum(1 for repo in github_user.repos if repo.fork)
    today = datetime.datetime.today()
    recent_than = today - datetime.timedelta(days=RECENT_DAYS)
    own_commits = github_user.get_latest_commits(recent_than)

    commits_by_repo = reduce(self.reduce_commits_by_repo, own_commits, dict())
    if commits_by_repo:
        last_project = max(commits_by_repo, key=commits_by_repo.get)
    else:
        last_project = ''
    logging.info(commits_by_repo)

    if last_project:
        last_project_url = [repo.html_url for repo in github_user.repos
                            if repo.name == last_project][0]
    else:
        last_project_url = None

    commits_by_date = reduce(self.reduce_commits_by_date, own_commits, dict())
    range = daterange(recent_than, today)
    for d in range:
        key = unicode(d.date())
        if key not in commits_by_date:
            commits_by_date[key] = 0
    commit_data = [commits_by_date[d] for d in sorted(commits_by_date)]
    max_commits = max(commit_data)
    logging.debug('Commit data %s', str(commit_data))

    commit_sparkline = data_uri(sparklines.impulse(commit_data,
                                                   below_color='SlateGray',
                                                   width=3,
                                                   dmin=0,
                                                   dmax=max(commit_data)))

    try:  # try to embed the scaled-down user avatar
        avatar = Image(urllib2.urlopen(github_user.avatar_url).read())
        avatar.resize(24, 24)
        github_user.avatar_url = data_uri(avatar.execute_transforms())
    except (AttributeError, ValueError, urllib2.URLError):
        pass

    user_info = dict((k, v) for k, v in github_user.__dict__.iteritems()
                     if k[0] != '_')

    values = {'user': user_info,
              'own_repos': len(github_user.repos) - fork_count,
              'fork_repos': fork_count,
              'languages': languages,
              'project_followers': github_user.project_followers -
                                   len(github_user.self_watched),
              'commit_sparkline': commit_sparkline,
              'max_commits': max_commits,
              'last_project': last_project,
              'last_project_url': last_project_url,
              'days': RECENT_DAYS
              }

    if not memcache.set(memcache_data_key, json.dumps(values),
                        MEMCACHE_EXPIRATION):
        logging.error('Memcache set failed for user data %s', username)

    return values
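# --- Zero-filling a per-day series ----------------------------------------------
# Both calculate_user_values() variants pad commits_by_date with zeros so the
# sparkline gets one value per day in the window, even days with no commits. A
# self-contained Python 3 sketch of the same pattern (the handlers above are
# Python 2 code, hence unicode()/reduce()/iteritems()):
from datetime import date, timedelta

RECENT_DAYS = 7
today = date.today()
commits_by_date = {str(today - timedelta(days=2)): 5}   # sparse input

for offset in range(RECENT_DAYS, -1, -1):
    key = str(today - timedelta(days=offset))
    commits_by_date.setdefault(key, 0)

commit_data = [commits_by_date[k] for k in sorted(commits_by_date)]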