def main_entry(argv):
    """Parse command-line arguments, configure logging and run the generator.

    Fixes two help-text typos from the previous revision (a stray space
    before the period in ``target_date`` and a stray trailing double quote
    in ``-N``).

    :param argv: full argument vector; ``argv[0]`` is the program name.
    :raises ValueError: when the parent directory of the requested output
        file does not exist.
    """
    parser = argparse.ArgumentParser(
        description='Generates Changelog from now until the specified date')
    parser.add_argument(
        'target_date',
        help='Specify when should stop generating. Accepts "YYYY-MM-DD" ISO 8601 Date format.',
        # Normalize the date to a datetime at midnight so it can later be
        # compared against full timestamps.
        type=lambda d: datetime.combine(date.fromisoformat(d), datetime.min.time()))
    parser.add_argument(
        '-t', '--token-file',
        help='Specify where to read the Personal Token. Default "~/.generate_changelog.token".',
        type=lambda x: pathlib.Path(x).expanduser().resolve(),
        # argparse applies ``type`` to string defaults, so the default path
        # is expanded and resolved exactly like an explicit argument.
        default='~/.generate_changelog.token')
    parser.add_argument(
        '-o', '--output-file',
        help='Specify where to write extracted information. Default to standard output.',
        type=lambda x: pathlib.Path(x).expanduser().resolve(),
        default=None)
    parser.add_argument(
        '--verbose',
        action='store_true',
        help='Indicates the logging system to generate more information about actions.',
        default=None)
    parser.add_argument(
        '-N', '--include-summary-none',
        action='store_true',
        help='Indicates if Pull Requests with Summary "None" should be included in the output.',
        default=None)
    arguments = parser.parse_args(argv[1:])
    logging.basicConfig(
        level=logging.DEBUG if arguments.verbose else logging.INFO,
        format=' LOG | %(threadName)s | %(levelname)s | %(message)s')
    log.debug(f'Commandline Arguments (+defaults): {arguments}')
    # Fail early when the output file could never be created.
    if (arguments.output_file is not None
            and (not arguments.output_file.parent.exists()
                 or not arguments.output_file.parent.is_dir())):
        raise ValueError(
            f"Specified directory for Output File doesn't exist: {arguments.output_file.parent}")
    main(arguments.target_date, arguments.token_file, arguments.output_file,
         arguments.include_summary_none)
def on_account_update_signal(
        debtor_id: int, creditor_id: int, last_change_ts: str,
        last_change_seqnum: int, principal: int, interest: float,
        interest_rate: float, demurrage_rate: float, commit_period: int,
        transfer_note_max_bytes: int, last_interest_rate_change_ts: str,
        last_transfer_number: int, last_transfer_committed_at: str,
        last_config_ts: str, last_config_seqnum: int, creation_date: str,
        negligible_amount: float, config_data: str, config_flags: int,
        ts: str, ttl: int, account_id: str, debtor_info_iri: str,
        debtor_info_content_type: str, debtor_info_sha256: str,
        *args, **kwargs) -> None:
    """Validate an incoming account-update message and forward it to the
    procedures layer.

    Timestamps and dates arrive as ISO-8601 strings and are parsed here;
    empty strings in the optional debtor-info fields are mapped to ``None``.
    Extra positional/keyword arguments are accepted and ignored, so the
    handler tolerates messages with additional fields.
    """
    # Sanity checks on field sizes; ``assert`` is stripped under ``-O``, so
    # these are debugging aids, not input validation.
    assert 0 <= transfer_note_max_bytes <= TRANSFER_NOTE_MAX_BYTES
    # The first length check is in characters, the second in UTF-8 bytes.
    assert len(config_data) <= CONFIG_DATA_MAX_BYTES and len(config_data.encode('utf8')) <= CONFIG_DATA_MAX_BYTES
    # ``.encode('ascii')`` raises UnicodeEncodeError for non-ASCII content.
    assert account_id == '' or len(account_id) <= 100 and account_id.encode('ascii')
    assert len(debtor_info_iri) <= 200
    assert debtor_info_content_type == '' or (
        len(debtor_info_content_type) <= 100 and debtor_info_content_type.encode('ascii'))
    # SHA-256 digests are transported as 64 hex characters.
    assert debtor_info_sha256 == '' or len(debtor_info_sha256) == 64
    procedures.process_account_update_signal(
        debtor_id=debtor_id,
        creditor_id=creditor_id,
        creation_date=date.fromisoformat(creation_date),
        last_change_ts=datetime.fromisoformat(last_change_ts),
        last_change_seqnum=last_change_seqnum,
        principal=principal,
        interest=interest,
        interest_rate=interest_rate,
        last_interest_rate_change_ts=datetime.fromisoformat(last_interest_rate_change_ts),
        transfer_note_max_bytes=transfer_note_max_bytes,
        last_config_ts=datetime.fromisoformat(last_config_ts),
        last_config_seqnum=last_config_seqnum,
        negligible_amount=negligible_amount,
        config_flags=config_flags,
        config_data=config_data,
        account_id=account_id,
        # Empty strings mean "not present" and become None downstream.
        debtor_info_iri=debtor_info_iri or None,
        debtor_info_content_type=debtor_info_content_type or None,
        # b16decode('') == b'' which is falsy, hence the ``or None``.
        debtor_info_sha256=b16decode(debtor_info_sha256, casefold=True) or None,
        last_transfer_number=last_transfer_number,
        last_transfer_committed_at=datetime.fromisoformat(last_transfer_committed_at),
        ts=datetime.fromisoformat(ts),
        ttl=ttl,
    )
def set_date(session, context, poll):
    """Store the picked date on the poll, refresh the datepicker and
    confirm the change to the user via a localized callback answer."""
    picked_day = date.fromisoformat(context.action)
    poll.current_date = picked_day
    update_datepicker(context, poll)
    confirmation = i18n.t(
        'callback.date_changed',
        locale=poll.locale,
        date=picked_day.isoformat(),
    )
    context.query.answer(confirmation)
def list_for_day(db: PeeweeSession, user: hug.directives.user, start_date: hug.types.text, end_date: hug.types.text):
    """Export all booked appointments between two ISO dates as an XLSX file.

    Returns the workbook as raw bytes (built in-memory via BytesIO).
    Non-admin users only see bookings they created themselves.
    Raises hug.HTTPGone when a lookup fails, hug.HTTPBadRequest on bad dates.
    """
    user_name = user.user_name
    with db.atomic():
        try:
            user_role = user.role
            start_day_object = date.fromisoformat(start_date)
            end_day_object = date.fromisoformat(end_date)
            # Build the spreadsheet entirely in memory.
            result = io.BytesIO()
            workbook = xlsxwriter.Workbook(result)
            worksheet = workbook.add_worksheet()
            bold = workbook.add_format({'bold': 1})
            date_format = workbook.add_format({'num_format': 'dd.mm.yyyy'})
            time_format = workbook.add_format({'num_format': 'hh:mm'})
            # Column widths for the 15 output columns A..O.
            worksheet.set_column('A:A', 15)
            worksheet.set_column('B:B', 8)
            worksheet.set_column('C:C', 18)
            worksheet.set_column('D:D', 15)
            worksheet.set_column('E:E', 18)
            worksheet.set_column('F:F', 15)
            worksheet.set_column('G:G', 15)
            worksheet.set_column('H:H', 15)
            worksheet.set_column('I:I', 15)
            worksheet.set_column('J:J', 15)
            worksheet.set_column('K:K', 15)
            worksheet.set_column('L:L', 15)
            worksheet.set_column('M:M', 15)
            worksheet.set_column('N:N', 15)
            worksheet.set_column('O:O', 15)
            # Header row (German labels are part of the output contract).
            worksheet.write('A1', 'Termin', bold)
            worksheet.write('B1', 'Uhrzeit', bold)
            worksheet.write('C1', 'Vorname', bold)
            worksheet.write('D1', 'Nachname', bold)
            worksheet.write('E1', 'Telefon', bold)
            worksheet.write('F1', 'Straße', bold)
            worksheet.write('G1', 'Hausnummer', bold)
            worksheet.write('H1', 'PLZ', bold)
            worksheet.write('I1', 'Stadt', bold)
            worksheet.write('J1', 'Geburtdatum', bold)
            worksheet.write('K1', 'Risikokategorie 1', bold)
            worksheet.write('L1', 'Berechtigungscode', bold)
            worksheet.write('M1', 'Behörde', bold)
            worksheet.write('N1', 'Gebucht von', bold)
            worksheet.write('O1', 'Gebucht am', bold)
            row = 1
            col = 0
            # end_day is inclusive: the filter goes up to end_day + 1 day.
            for timeslot in TimeSlot.select().where(
                    (TimeSlot.start_date_time >= start_day_object) &
                    (TimeSlot.start_date_time < end_day_object + timedelta(days=1))).order_by(
                        TimeSlot.start_date_time.desc()):
                for appointment in Appointment.select().where(
                        (Appointment.time_slot == timeslot) &
                        (Appointment.booked == True)):
                    try:
                        booking = Booking.get(
                            Booking.appointment == appointment)
                        # Non-admins may only export their own bookings; the
                        # narrowed query raises DoesNotExist otherwise, which
                        # is swallowed below (row is skipped).
                        if user_role != UserRoles.ADMIN:
                            booking = Booking.select().where(
                                (Booking.appointment == appointment) &
                                (Booking.booked_by == user_name)).get()
                        worksheet.write_datetime(
                            row, col, timeslot.start_date_time, date_format)
                        worksheet.write_datetime(
                            row, col + 1, timeslot.start_date_time, time_format)
                        worksheet.write_string(
                            row, col + 2, booking.first_name)
                        worksheet.write_string(row, col + 3, booking.surname)
                        worksheet.write_string(row, col + 4, booking.phone)
                        worksheet.write_string(
                            row, col + 5, booking.street if booking.street is not None else "")
                        worksheet.write_string(
                            row, col + 6, booking.street_number if booking.street_number is not None else "")
                        worksheet.write_string(
                            row, col + 7, booking.post_code if booking.post_code is not None else "")
                        worksheet.write_string(
                            row, col + 8, booking.city if booking.city is not None else "")
                        if booking.birthday is None:
                            worksheet.write_string(row, col + 9, "")
                        else:
                            worksheet.write_datetime(
                                row, col + 9, booking.birthday, date_format)
                        worksheet.write_string(
                            row, col + 10, booking.reason if booking.reason is not None else "")
                        worksheet.write_string(row, col + 11, booking.secret)
                        worksheet.write_string(row, col + 12, booking.office)
                        worksheet.write_string(
                            row, col + 13, booking.booked_by)
                        worksheet.write_datetime(
                            row, col + 14, booking.booked_at, date_format)
                        row += 1
                    except DoesNotExist as e:
                        # Appointment without a (visible) booking: skip row.
                        pass
            workbook.close()
            result.flush()
            return result.getvalue()
        except DoesNotExist as e:
            raise hug.HTTPGone
        except ValueError as e:
            # Raised by date.fromisoformat on malformed input dates.
            raise hug.HTTPBadRequest
def writePersonal(flightsRecent, flightsLocal, nrecentMonths, headerDict,
                  locale, groupName, htmlFilePath, note):
    ''' Recent personal improvement'''
    # Output page path; .hbs = Handlebars template consumed elsewhere.
    pageFile = '{}-personal.hbs'.format(htmlFilePath)
    flightsOld = deepcopy(flightsLocal)
    # Fixed-capacity structured arrays (max 500 pilots); only the first
    # iPBest rows are kept after each fill loop.
    flightsPBestDist = zeros(500, dtype=[('UTCFlightDateStr', unicode_, 20),
                                         ('localFlightDateStr', unicode_, 20),
                                         ('name', unicode_, 100),
                                         ('points', float32),
                                         ('duration', float32),
                                         ('speed', float32),
                                         ('airport', unicode_, 20),
                                         ('glider', unicode_, 20),
                                         ('url', unicode_, 200),
                                         ('published', bool_),
                                         ('pointsGain', float32),
                                         ('speedGain', float32)])
    flightsPBestSpeed = zeros(500, dtype=[('UTCFlightDateStr', unicode_, 20),
                                          ('localFlightDateStr', unicode_, 20),
                                          ('name', unicode_, 100),
                                          ('points', float32),
                                          ('duration', float32),
                                          ('speed', float32),
                                          ('airport', unicode_, 20),
                                          ('glider', unicode_, 20),
                                          ('url', unicode_, 200),
                                          ('published', bool_),
                                          ('pointsGain', float32),
                                          ('speedGain', float32)])
    # Zero out scores of flights OLDER than the recent window, so
    # flightsRecent only ranks flights inside the window.
    for i in range(len(flightsRecent)):
        if date.fromisoformat(
                flightsRecent[i]['localFlightDateStr']
        ) < date.today() - timedelta(days=30 * nrecentMonths):
            flightsRecent[i]['points'] = 0.0
            flightsRecent[i]['speed'] = 0.0
    # Conversely, zero out scores INSIDE the window for the "old" baseline.
    # NOTE(review): the date is read from flightsRecent[i], not flightsOld[i].
    # This only works if both arrays have identical ordering and dates —
    # confirm against the caller (rank() builds them from the same source).
    for i in range(len(flightsOld)):
        if date.fromisoformat(
                flightsRecent[i]['localFlightDateStr']
        ) >= date.today() - timedelta(days=30 * nrecentMonths):
            flightsOld[i]['points'] = 0.0
            flightsOld[i]['speed'] = 0.0
    pilots = list(set(flightsRecent['name']))
    '''Recent Improvement Dist, sort by name'''
    iPBest = 0
    for pilot in pilots:
        # Best recent flight vs best older flight, by points.
        pilotRecentDistFlights = sort(
            flightsRecent[flightsRecent['name'] == pilot],
            order=['points'])[::-1]
        pilotOldFlights = sort(flightsOld[flightsOld['name'] == pilot],
                               order=['points'])[::-1]
        # Keep only pilots who improved by more than 0.5 points.
        if pilotRecentDistFlights[0]['points'] - pilotOldFlights[0][
                'points'] > 0.5:
            flightsPBestDist[iPBest] = pilotRecentDistFlights[0]
            flightsPBestDist[iPBest]['pointsGain'] = pilotRecentDistFlights[0][
                'points'] - pilotOldFlights[0]['points']
            iPBest += 1
    flightsPBestDist = sort(flightsPBestDist[:iPBest], order=['name'])  # sort by name
    '''Recent Improvement Speed, sort by name'''
    iPBest = 0
    for pilot in pilots:
        # Same comparison as above, by speed.
        pilotRecentSpeedFlights = sort(
            flightsRecent[flightsRecent['name'] == pilot],
            order=['speed'])[::-1]
        pilotOldFlights = sort(flightsOld[flightsOld['name'] == pilot],
                               order=['speed'])[::-1]
        if pilotRecentSpeedFlights[0]['speed'] - pilotOldFlights[0][
                'speed'] > 0.5:
            flightsPBestSpeed[iPBest] = pilotRecentSpeedFlights[0]
            flightsPBestSpeed[iPBest]['speedGain'] = pilotRecentSpeedFlights[
                0]['speed'] - pilotOldFlights[0]['speed']
            iPBest += 1
    flightsPBestSpeed = sort(flightsPBestSpeed[:iPBest], order=['name'])  # sort by name
    '''Begin Page'''
    lines = pageStart('Personal Records')
    lines.append(' <div class="page-header"> \n')
    lines.append(' <h2>{} Flights Posted by Members of {}</h2> \n'.format(
        locale, groupName))
    lines.append(' </div> \n')
    '''Personal distance records, recent'''
    fields = [
        'name', 'points', 'pointsGain', 'localFlightDateStr', 'airport',
        'duration', 'speed', 'glider', 'url'
    ]
    lines.append(' <div> \n')
    lines.append(
        ' <h3>Personal Distance Records Set in the Last {} Months</h3> \n'.
        format(nrecentMonths))
    lines.append(' </div> \n')
    lines.append('<table class="table table-striped"> \n')
    lines.append(' <thead> \n')
    # lines.append(' <th class="column-buttons">#</th>')
    for field in fields:
        lines.append(' <th class="column-buttons">{}</th> \n'.format(
            headerDict[field]))
    lines.append(' </thead> \n')
    lines.append(' <tbody> \n')
    for i, flight in enumerate(flightsPBestDist):
        lines.append('\t<tr> \n')
        # lines.append('\t\t<td align = "left"> {} </td> \n'.format(i + 1))
        for field in fields:
            if field == 'url':
                lines.append(
                    '\t\t<td align = "left"> <a href="{}">link</a> </td> \n'.
                    format(flight[field]))
            elif field in ['points', 'speed', 'pointsGain', 'speedGain']:
                # Numeric scores are rendered as rounded integers.
                lines.append('\t\t<td align = "left">{} </td> \n'.format(
                    int(round(flight[field]))))
            elif field == 'duration':
                lines.append('\t\t<td align = "left">{:.1f} hr</td> \n'.format(
                    flight[field]))
            else:
                lines.append('\t\t<td align = "left">{}</td> \n'.format(
                    flight[field]))
        lines.append('\t</tr> \n')
    lines.append(' </tbody> \n')
    lines.append(' </table> \n')
    # ########################################################################################################
    '''Personal speed records, recent'''
    fields = [
        'name', 'speed', 'speedGain', 'localFlightDateStr', 'airport',
        'duration', 'points', 'glider', 'url'
    ]
    lines.append(' <div> \n')
    lines.append(
        ' <h3>Personal Speed Records Set in the Last {} Months</h3> \n'.
        format(nrecentMonths))
    lines.append(' </div> \n')
    lines.append('<table class="table table-striped"> \n')
    lines.append(' <thead> \n')
    # lines.append(' <th class="column-buttons">#</th>')
    for field in fields:
        lines.append(' <th class="column-buttons">{}</th> \n'.format(
            headerDict[field]))
    lines.append(' </thead> \n')
    lines.append(' <tbody> \n')
    for i, flight in enumerate(flightsPBestSpeed):
        lines.append('\t<tr> \n')
        # lines.append('\t\t<td align = "left"> {} </td> \n'.format(i + 1))
        for field in fields:
            if field == 'url':
                lines.append(
                    '\t\t<td align = "left"> <a href="{}">link</a> </td> \n'.
                    format(flight[field]))
            elif field in ['points', 'speed', 'pointsGain', 'speedGain']:
                lines.append('\t\t<td align = "left">{} </td> \n'.format(
                    int(round(flight[field]))))
            elif field == 'duration':
                lines.append('\t\t<td align = "left">{:.1f} hr</td> \n'.format(
                    flight[field]))
            else:
                lines.append('\t\t<td align = "left">{}</td> \n'.format(
                    flight[field]))
        lines.append('\t</tr> \n')
    lines.append(' </tbody> \n')
    lines.append(' </table> \n')
    lines = pageEnd(lines, note)
    writeFile(pageFile, lines)
def main():
    """Demo entry point: build and print a sample Italian fiscal code."""
    codice = CodFiscale(
        "Luongo", 'Roberto', 'M', dt.fromisoformat('1968-06-01'), 'Pozzuoli')
    assembled = (codice.creaParteCognome() + codice.creaParteNome()
                 + codice.creaParteData() + codice.creaParteLuogo()
                 + codice.creaCin())
    print(assembled)
    print(f"Il codice fiscale è: \033[32m{codice.stampaCF()}")
def earliest_start_date() -> date:
    """Return the sentinel "earliest possible" start date, 0001-01-01."""
    return date(1, 1, 1)
</tr> </thead> <tbody> """ # Build the first (date-based) table table_techniques = [] for analytic in sorted(analytics, key=lambda k: k['id']): print("building date-based table, including {}".format(analytic['id'])) coverage = "" implementations = "" car_id = "<a href=\"/analytics/{}/\">{}</a>".format( analytic["id"], analytic["id"]) title = analytic["title"] date_added = analytic["submission_date"] date_str = date.fromisoformat(date_added.replace("/", "-")).strftime("%B %d %Y") if 'coverage' in analytic and len(analytic['coverage']) > 0: coverage += "<ul>" count = 0 for cov in analytic['coverage']: # Only capture the first two techniques, to limit the size of the table if count < 2: coverage += "<li><a href=\"https://attack.mitre.org/techniques/{}/\">{}</a></li>".format( cov['technique'], techniques[cov['technique']]) count += 1 # Get all of the techniques seen in all analytics # This is for building the second (subtechniques based) table if cov['technique'] not in table_techniques: table_techniques.append(cov['technique']) coverage += "</ul>" if 'implementations' in analytic and len(analytic['implementations']) > 0:
def __init__(self, parser: ConfigParser):
    """Load all Swiss/world COVID CSV data sets from the URLs in *parser*
    and precompute the derived frames and summary statistics used by the
    dashboard (daily diffs, fill-forward series, fatality rates,
    rolling averages, regressions)."""
    # Raw CSV downloads, one frame per configured URL.
    self.swiss_cases = pd.read_csv(parser.get("urls", "swiss_cases"))
    self.swiss_fatalities = pd.read_csv(
        parser.get("urls", "swiss_fatalities"))
    self.swiss_hospitalizations = pd.read_csv(
        parser.get("urls", "swiss_hospitalizations"))
    self.swiss_icu = pd.read_csv(parser.get("urls", "swiss_icu"))
    self.swiss_releases = pd.read_csv(parser.get("urls", "swiss_releases"))
    self.swiss_demography = pd.read_csv(parser.get("urls",
                                                   "swiss_demography"),
                                        index_col=0)
    self.world_cases = self.__simplify_world_data(
        pd.read_csv(parser.get("urls", "world_cases")))
    # NOTE(review): "fataltities" (sic) — attribute name is misspelled but
    # used consistently below; renaming would break external readers.
    self.world_fataltities = self.__simplify_world_data(
        pd.read_csv(parser.get("urls", "world_fatalities")))
    self.world_population = self.__get_world_population()
    # Date-indexed views of the raw frames.
    self.swiss_cases_by_date = self.swiss_cases.set_index("Date")
    self.swiss_fatalities_by_date = self.swiss_fatalities.set_index("Date")
    self.swiss_hospitalizations_by_date = self.swiss_hospitalizations.set_index(
        "Date")
    self.swiss_cases_by_date_filled = self.swiss_cases_by_date.fillna(
        method="ffill", axis=0)
    # Daily new cases; zeros become NaN so they are not plotted.
    self.swiss_cases_by_date_diff = self.swiss_cases_by_date_filled.diff(
    ).replace(0, float("nan"))
    self.swiss_cases_by_date_diff["date_label"] = [
        date.fromisoformat(d).strftime("%d. %m.")
        for d in self.swiss_cases_by_date_diff.index.values
    ]
    self.swiss_fatalities_by_date_diff = self.swiss_fatalities_by_date.diff(
    ).replace(0, float("nan"))
    self.swiss_hospitalizations_by_date_diff = self.swiss_hospitalizations_by_date.diff(
    ).replace(0, float("nan"))
    # Forward-filled series for cumulative plots and rate computations.
    self.swiss_cases_by_date_filled = self.swiss_cases_by_date.fillna(
        method="ffill", axis=0)
    self.swiss_fatalities_by_date_filled = self.swiss_fatalities_by_date.fillna(
        method="ffill", axis=0)
    self.swiss_hospitalizations_by_date_filled = self.swiss_hospitalizations_by_date.fillna(
        method="ffill", axis=0)
    self.swiss_case_fatality_rates = (
        self.swiss_fatalities_by_date_filled /
        self.swiss_cases_by_date_filled)
    self.swiss_cases_by_date_filled_per_capita = (
        self.__get_swiss_cases_by_date_filled_per_capita())
    # Headline summary values.
    self.latest_date = self.__get_latest_date()
    self.updated_cantons = self.__get_updated_cantons()
    self.new_swiss_cases = self.__get_new_cases()
    self.total_swiss_cases = self.__get_total_swiss_cases()
    self.total_swiss_fatalities = self.__get_total_swiss_fatalities()
    self.swiss_case_fatality_rate = (self.total_swiss_fatalities /
                                     self.total_swiss_cases)
    # Put the date at the end
    self.swiss_cases_as_dict = self.swiss_cases.to_dict("list")
    date_tmp = self.swiss_cases_as_dict.pop("Date")
    self.swiss_cases_as_dict["Date"] = date_tmp
    self.swiss_cases_normalized_as_dict = (
        self.__get_swiss_cases_as_normalized_dict())
    self.swiss_fatalities_as_dict = self.swiss_fatalities.to_dict("list")
    # All region columns except the aggregate "AT" column and "Date".
    self.canton_labels = [
        canton for canton in self.swiss_cases_as_dict
        if canton != "AT" and canton != "Date"
    ]
    self.cantonal_centres = self.__get_cantonal_centres()
    #
    # Moving average showing development
    #
    self.moving_total = self.__get_moving_total(
        self.swiss_cases_by_date.diff()).replace(0, float("nan"))
    # 7-day centered rolling mean of the aggregate column, rounded.
    self.swiss_cases_by_date_diff["AT_rolling"] = np.round(
        self.swiss_cases_by_date_diff["AT"].rolling(7, center=True).mean(),
        0,
    )
    self.swiss_fatalities_by_date_diff["AT_rolling"] = np.round(
        self.swiss_fatalities_by_date_diff["AT"].rolling(
            7, center=True).mean(),
        0,
    )
    #
    # World related data
    #
    self.world_case_fatality_rate = (self.world_fataltities.iloc[-1] /
                                     self.world_cases.iloc[-1])
    self.swiss_world_cases_normalized = self.__get_swiss_world_cases_normalized(
    )
    #
    # Some regression analysis on the data
    #
    self.prevalence_density_regression = self.__get_regression(
        self.swiss_demography["Density"],
        self.swiss_cases_by_date_filled_per_capita.iloc[-1],
    )
    self.cfr_age_regression = self.__get_regression(
        self.swiss_demography["O65"], self.swiss_case_fatality_rates.iloc[-1])
    self.scaled_cases = self.__get_scaled_cases()
pathToDataCountries = 'data/int/countries-latest-all.json' if checkRunningOnServer(): pathToDataDeDistricts = pathPrefixOnServer + pathToDataDeDistricts pathToDataDeStates = pathPrefixOnServer + pathToDataDeStates pathToDataCountries = pathPrefixOnServer + pathToDataCountries # connect to DB con, cur = db_connect() # load latest data d_data_DeDistricts = {} with open(pathToDataDeDistricts, mode='r', encoding='utf-8') as fh: d_data_DeDistricts = json.load(fh) s_date_data_hh = d_data_DeDistricts["02000"]["Date_Latest"] date_data_hh = date.fromisoformat(s_date_data_hh) date_yesterday = date.today()-timedelta(days=1) assert date_data_hh == date_yesterday, f"date data hh: {date_data_hh} != date yesterday {date_yesterday}" del date_data_hh, date_yesterday d_data_DeStates = {} with open(pathToDataDeStates, mode='r', encoding='utf-8') as fh: d_data_DeStates = json.load(fh) d_data_Countries = {} with open(pathToDataCountries, mode='r', encoding='utf-8') as fh: # convert list to dict l = json.load(fh) for d in l: code = d['Code'] del d['Code']
def get_age(b_date: str) -> int:
    """Return the current age in whole years for a birth date string.

    The input is sliced positionally as ``DD?MM?YYYY`` (day 0:2, month 3:5,
    year 6:10), so any single-character separator is accepted.
    """
    birth_year = int(b_date[6:10])
    birth_month = int(b_date[3:5])
    birth_day = int(b_date[0:2])
    born = date(birth_year, birth_month, birth_day)
    today = date.today()
    # Subtract one year if this year's birthday has not happened yet.
    birthday_passed = (today.month, today.day) >= (born.month, born.day)
    return today.year - born.year - (0 if birthday_passed else 1)
def exchanges(currency, convert_date):
    """Expose /exchanges GET endpoint."""
    requested_day = date.fromisoformat(convert_date)
    provider = ExchangeRateToPLN()
    rate_to_pln = provider.get_exchange_rate_to_pln(currency, requested_day)
    payload = {
        "currency": currency,
        "rate_to_pln": rate_to_pln,
        "date": convert_date,
    }
    return jsonify(payload)
from datetime import date

# Ask for a birth date and report the age in days.
raw_dob = input('Please input your date of birth in the format YYYY-MM-DD\n')
birthday = date.fromisoformat(raw_dob)
age = date.today() - birthday
print(f'You are {age.days} days old')
import sys
from datetime import date

# Print the absolute number of days between today and the ISO date passed
# as the first command-line argument.
requested_day = date.fromisoformat(sys.argv[1])
elapsed = date.today() - requested_day
print(abs(elapsed.days))
def inputMoney(self, txtFin, targetDate):
    """Replay the transaction log ``txtFin`` up to ``targetDate`` (ISO date
    string) and return the money still invested in open long positions.

    Each log line looks like ``YYYY-MM-DD,order,symbol,shares,price(tag)``;
    close orders consume matching open records FIFO, long_fin positions are
    counted at ``fin_rate`` (margin) of their price.
    """
    #dt = Harvey.startDate
    inputMoney = 0
    # Fraction of a financed (margin) position actually paid in cash —
    # presumably broker margin ratio; TODO confirm.
    fin_rate = 0.4
    holdStock = {}  # symbol -> list of still-open transaction lines
    txtIndex = 1  # index 0 is presumably a header line — verify caller
    while txtIndex <= len(txtFin) - 1:
        # Stop replaying once the log passes the target date.
        if date.fromisoformat(
                txtFin[txtIndex][:10]) > date.fromisoformat(targetDate):
            break
        match = re.search(
            r"(\d+-\d+-\d+)(,)(\w+)(,)(\d+)(,)(\d+)(,)(\d+)(\(\w+\))",
            txtFin[txtIndex])
        order = match.group(3)
        sym = match.group(5)
        share = int(match.group(7))
        price = int(match.group(9))
        # Maps a close order to the kind of open record it consumes.
        pair = {
            "close_nake": "long_nake",
            "close_short": "short",
            "close_fin": "long_fin"
        }
        if order in ["long_nake", "long_fin", "short"]:
            # Opening order: remember the raw line for later matching.
            if sym in holdStock:
                holdStock[sym].append(txtFin[txtIndex])
            else:
                holdStock[sym] = [txtFin[txtIndex]]
        else:
            if order in ["close_nake", "close_fin"]:
                if not sym in holdStock:
                    raise ValueError("close action is wrong")
                else:
                    for j in range(len(holdStock[sym])):
                        # record before the date
                        rec = re.search(
                            r"(\d+-\d+-\d+)(,)(\w+)(,)(\d+)(,)(\d+)(,)(\d+)(\(\w+\))",
                            holdStock[sym][j])
                        date_rec = rec.group(1)
                        order_rec = rec.group(3)
                        sym_rec = rec.group(5)
                        share_rec = int(rec.group(7))
                        cost_rec = int(rec.group(9))  # buy price
                        if pair[order] == order_rec:
                            diff = share - share_rec  # "share" is the number of stock you closed
                            if diff > 0:
                                # Open record fully consumed; keep closing.
                                share -= share_rec
                                # share = "0"
                                holdStock[sym][j] = rec.group(
                                    1) + rec.group(2) + rec.group(
                                        3) + rec.group(4) + rec.group(
                                            5
                                        ) + rec.group(6) + "0" + rec.group(
                                            8) + "0" + rec.group(10)
                            elif diff == 0:
                                # Exact match: zero out and stop.
                                holdStock[sym][j] = rec.group(
                                    1) + rec.group(2) + rec.group(
                                        3) + rec.group(4) + rec.group(
                                            5
                                        ) + rec.group(6) + "0" + rec.group(
                                            8) + "0" + rec.group(10)
                                break
                            elif diff < 0:
                                # Partial close: keep the remainder with a
                                # proportional share of the original cost.
                                cost_portion = (cost_rec * share) // share_rec
                                holdStock[sym][j] = rec.group(
                                    1
                                ) + rec.group(2) + rec.group(3) + rec.group(
                                    4) + rec.group(5) + rec.group(6) + str(
                                        -diff) + rec.group(8) + str(
                                            cost_rec -
                                            cost_portion) + rec.group(10)
                                break
        txtIndex += 1
    # Sum up what is still invested: full price for cash longs, margin
    # fraction for financed longs; shorts do not tie up capital here.
    for i in holdStock:
        for j in holdStock[i]:
            m = re.search(
                r"(\d+-\d+-\d+)(,)(\w+)(,)(\d+)(,)(\d+)(,)(\d+)(\(\w+\))",
                j)
            order = m.group(3)
            price = int(m.group(9))
            #print("price: ", price)
            if order == "long_nake":
                inputMoney += price
            elif order == "long_fin":
                inputMoney += price * fin_rate
    return inputMoney
def get_no_spam(self, id):
    """Return True while the user's "no spam" period is still in effect.

    Missing users or a missing ``no_spam_until`` entry count as False.
    """
    try:
        until = self.users[id]['extra']['no_spam_until']
    except KeyError:
        return False
    return date.today() <= date.fromisoformat(until)
def relize(self, txtFin, start, end):
    """Walk the transaction log day by day between ``start`` and ``end``
    (ISO date strings, inclusive) and return the realized profit from close
    orders, net of transaction fees, stock tax and margin interest.

    Positions open before ``start`` are seeded via ``self.stockInf``.
    Raises ValueError when ``start`` is after ``end`` or a close order has
    no matching open position.
    """
    # e.g. {5478:{long_nake:2000,long_fin:1000},
    # 1101:{long_fin,3000},2251:{short,1000}
    #print(Harvey.relize(txtFin, sys.argv[3], sys.argv[4]))
    # from datetime import timedelta, date #don't know
    if date.fromisoformat(start) > date.fromisoformat(end):
        raise ValueError("the date you order is wrong!")
    dt = start  # current simulation day, kept as an ISO string
    dt_before = str(date.fromisoformat(start) - timedelta(1))
    # Positions already held the day before the window starts.
    holdStock = self.stockInf(txtFin, dt_before)
    relize = 0  # NOTE(review): unused; shadows the method name
    profit = 0
    txtIndex = 1  # index 0 is presumably a header line — verify caller
    transaction_fee_rate = 0.001425
    interestRate_fin_rate = 0.065  # yearly margin-loan interest rate
    tax_stock_rate = 0.003  # NOTE(review): unused; literal 0.003 used below
    while txtIndex <= len(txtFin) - 1:
        # Past the end of the window: we are done.
        if date.fromisoformat(
                txtFin[txtIndex][:10]) > date.fromisoformat(end):
            return profit
        # Log entry older than the current day: skip it.
        if date.fromisoformat(dt) > date.fromisoformat(
                txtFin[txtIndex][:10]):
            txtIndex += 1
            continue
        # Current day has no entry yet: advance the day.
        if date.fromisoformat(dt) < date.fromisoformat(
                txtFin[txtIndex][:10]):
            dt = str(date.fromisoformat(dt) + timedelta(1))
            continue
        if date.fromisoformat(dt) == date.fromisoformat(
                txtFin[txtIndex][:10]):
            match = re.search(
                r"(\d+-\d+-\d+)(,)(\w+)(,)(\d+)(,)(\d+)(,)(\d+)(\(\w+\))",
                txtFin[txtIndex])
            date_txt = match.group(1)
            order = match.group(3)
            sym = match.group(5)
            share = int(match.group(7))
            price = int(match.group(9))
            # Maps a close order to the kind of open record it consumes.
            pair = {
                "close_nake": "long_nake",
                "close_short": "short",
                "close_fin": "long_fin"
            }
            if order in ["long_nake", "long_fin", "short"]:
                # Opening order: remember the raw line for later matching.
                if sym in holdStock:
                    holdStock[sym].append(txtFin[txtIndex])
                else:
                    holdStock[sym] = [txtFin[txtIndex]]
            else:
                if order in ["close_nake", "close_fin"]:
                    if not sym in holdStock:
                        raise ValueError("close action is wrong")
                    else:
                        # Selling-side costs: stock tax + selling fee.
                        cost = self.tax_stock(
                            price, 0.003) + self.transaction_fee(
                                price, transaction_fee_rate)
                        for j in range(len(holdStock[sym])):
                            # record before the date
                            rec = re.search(
                                r"(\d+-\d+-\d+)(,)(\w+)(,)(\d+)(,)(\d+)(,)(\d+)(\(\w+\))",
                                holdStock[sym][j])
                            date_rec = rec.group(1)
                            order_rec = rec.group(3)
                            sym_rec = rec.group(5)
                            share_rec = int(rec.group(7))
                            cost_rec = int(rec.group(9))  # buy price
                            if pair[order] == order_rec:
                                diff = share - share_rec  # "share" is the number of stock you closed
                                if diff > 0:
                                    # Open record fully consumed.
                                    share -= share_rec
                                    # share = "0"
                                    holdStock[sym][j] = rec.group(
                                        1) + rec.group(2) + rec.group(
                                            3) + rec.group(4) + rec.group(
                                                5) + rec.group(
                                                    6) + "0" + rec.group(
                                                        8
                                                    ) + "0" + rec.group(10)
                                    # cost_nake == (selling price) * (tax + selling fee) +
                                    # (cost_rec)*(buy fee)
                                    cost += cost_rec + self.transaction_fee(
                                        cost_rec, transaction_fee_rate)
                                    if order == "close_fin":
                                        date_by = (
                                            date.fromisoformat(date_txt) -
                                            date.fromisoformat(date_rec)
                                        ).days
                                        # def interst(self, interstingRate, cost_rec, date_by):
                                        cost += self.interest_fin(
                                            interestRate_fin_rate,
                                            cost_rec, date_by)
                                elif diff == 0:
                                    holdStock[sym][j] = rec.group(
                                        1) + rec.group(2) + rec.group(
                                            3) + rec.group(4) + rec.group(
                                                5) + rec.group(
                                                    6) + "0" + rec.group(
                                                        8
                                                    ) + "0" + rec.group(10)
                                    # NOTE(review): arguments appear swapped
                                    # compared to the other branches
                                    # (rate, cost) vs (cost, rate) — confirm
                                    # transaction_fee's signature.
                                    cost += cost_rec + self.transaction_fee(
                                        transaction_fee_rate, cost_rec)
                                    # NOTE(review): "cost_fin" looks like a
                                    # typo for "close_fin"; as written this
                                    # branch never runs.
                                    if order == "cost_fin":
                                        interstingRate = 0.0065
                                        date_by = (
                                            date.fromisoformat(date_txt) -
                                            date.fromisoformat(date_rec)
                                        ).days
                                        # def interst(self, interstingRate, cost_rec, date_by):
                                        cost += self.interest_fin(
                                            interestRate_fin_rate,
                                            cost_rec, date_by)
                                    break
                                elif diff < 0:
                                    # Partial close: pro-rate the buy cost.
                                    cost_portion = (cost_rec *
                                                    share) // share_rec
                                    #print("selling cost: ", cost)
                                    #print("buy fee: ", self.fee(cost_portion))
                                    #print("cost_portion: ",cost_portion)
                                    # NOTE(review): date_by may be unbound
                                    # here unless a diff > 0 iteration with
                                    # close_fin ran earlier — confirm.
                                    if order == "close_fin":
                                        cost += self.interest_fin(
                                            interestRate_fin_rate,
                                            cost_portion, date_by)
                                    cost += cost_portion + self.transaction_fee(
                                        cost_portion, transaction_fee_rate)
                                    holdStock[sym][j] = rec.group(
                                        1) + rec.group(2) + rec.group(
                                            3) + rec.group(4) + rec.group(
                                                5) + rec.group(6) + str(
                                                    -diff
                                                ) + rec.group(8) + str(
                                                    cost_rec - cost_portion
                                                ) + rec.group(10)
                                    break
                        profit += (price - cost)
                elif order == "close_fin":
                    # NOTE(review): unreachable — "close_fin" is handled by
                    # the branch above.
                    pass  # not yet, just test another part
        txtIndex += 1
        # NOTE(review): txtFin[txtIndex] here can raise IndexError on the
        # last entry — confirm the log always ends before that happens.
        if txtFin[
                txtIndex][:10] != dt:  # maybe the user operate many time in a day
            dt = str(date.fromisoformat(dt) + timedelta(1))
    return profit
def accrued(self, then, now):
    # NOTE(review): this looks like a method of a class defined above this
    # view (it takes ``self`` and calls ``self.amount_from``); original
    # indentation was lost in this chunk — confirm its enclosing class.
    # Returns the amount accrued between the two given dates.
    return self.amount_from(then, now)


class SemesterAccrualBasis(AccrualBasis):
    """Accrual basis spread over one semester."""

    def __init__(self, amount, start_date):
        end_date = start_date + relativedelta(
            months=+4)  # 4 months to a semester
        super().__init__(start_date, end_date, amount)


class QuarterlyAccrualBasis(AccrualBasis):
    """Accrual basis spread over one quarter."""

    def __init__(self, amount, start_date):
        end_date = start_date + relativedelta(
            months=+3)  # 3 months to a quarter
        super().__init__(start_date, end_date, amount)


class MonthlyAccrualBasis(AccrualBasis):
    """Accrual basis spread over one month."""

    def __init__(self, amount, start_date):
        end_date = start_date + relativedelta(months=+1)
        super().__init__(start_date, end_date, amount)


# Known lease liabilities: (start, end, total amount over the period).
real_estate_leases = [
    TimedLiability(date.fromisoformat('2019-07-20'),
                   date.fromisoformat('2020-01-31'), 999.00 * 6),
    TimedLiability(date.fromisoformat('2019-02-01'),
                   date.fromisoformat('2021-02-28'), 1037.00 * 13),
]
def createDatawarehouseClient(self):
    """ETL step: aggregate covid statistics from the ``interDB`` MySQL
    database and load them into the ``clientdb`` star schema (DimTime,
    DimLocation, FactCovCase). Prints the number of loaded fact rows."""
    # dateCode month number -> short month name for DimTime.
    monthList = {
        '01': 'Jan',
        '02': 'Feb',
        '03': 'Mar',
        '04': 'Apr',
        '05': 'May',
        '06': 'Jun',
        '07': 'Jul',
        '08': 'Aug',
        '09': 'Sep',
        '10': 'Oct',
        '11': 'Nov',
        '12': 'Dec'
    }
    # Source database connection (raw covid stats).
    mydb = mysql.connector.connect(host="localhost",
                                   user="******",
                                   passwd="",
                                   database="interDB")
    mycursor = mydb.cursor()
    mycursor.execute(
        "select country, countryCode, date, dateCode, sum(confirmed) as coronaConfirmed,"
        " sum(deaths) as coronaDeaths, sum(recovered) as coronaRecovered from covidstats "
        "group by country, countryCode, date, dateCode")
    result = mycursor.fetchall()
    preProcessing = TweetsPreProcessing()
    # Target (warehouse) database, via the Orator-style DatabaseManager.
    configdb = {
        'mysql': {
            'driver': 'mysql',
            'host': 'localhost',
            'database': 'clientdb',
            'user': '******',
            'password': '',
            'prefix': ''
        }
    }
    db = DatabaseManager(configdb)
    Model.set_connection_resolver(db)
    cpt = 0
    for row in result:
        # row[3] is a compact date code: YYYYMMDD.
        year = row[3][:4]
        month = row[3][4:6]
        day = row[3][6:8]
        dayOfYear = date.fromisoformat(year + '-' + month + '-' +
                                       day).timetuple().tm_yday
        # Approximate astronomical season boundaries by day-of-year.
        spring = range(80, 172)
        summer = range(172, 264)
        autumn = range(264, 355)
        if dayOfYear in spring:
            season = 'spring'
        elif dayOfYear in summer:
            season = 'summer'
        elif dayOfYear in autumn:
            season = 'autumn'
        else:
            season = 'winter'
        timeAltID = row[3]
        monthName = monthList[month]
        rowLocation = preProcessing.getLocation(row[0])
        rowTime = [
            timeAltID, dayOfYear, day, month, monthName, year, season
        ]
        time = DimTime()
        location = DimLocation()
        factCovCase = FactCovCase()
        # fill the dimensions
        timeID = time.insert(rowTime)
        locationID = location.insert(rowLocation)
        # fill the dimensions
        # fill the fact table with foreign keys & mesures
        nbrOfCases = row[4]
        nbrOfDeath = row[5]
        nbrOfRecovered = row[6]
        row = [locationID, timeID, nbrOfCases, nbrOfDeath, nbrOfRecovered]
        factCovCase.insert(row)
        cpt += 1
    print(cpt)
def getNBPValueDayBefore(value, curency, time):
    """Look up the NBP value for the day before ``time`` (ISO date string)."""
    previous_day = date.fromisoformat(time) - timedelta(days=1)
    return getNBPValue(value, curency, previous_day.isoformat())
def latest_end_date() -> date:
    """Return the sentinel "latest possible" end date, 9999-12-31."""
    return date(9999, 12, 31)
fop = open(fpt, "r") fco = fop.read() mat = re.search(tiexp, fco) if mat: title = mat.group(1) mat = re.search(tgexp, fco) if mat: found = mat.group(1) tags = [stg.replace(""", "").strip() for stg in found.split(",")] mat = re.search(dtexp, fco) if mat: found = mat.group(1) dte = date.fromisoformat(found) mat = re.search(coexp, fco, flags=re.DOTALL) if mat: fco = mat.group(1) articles.append({ "filename": filename, "title": title, "tags": tags, "date": dte, "content": fco, }) document = { "filename": filename,
def rank(flights, airports, groupName, googleGroupName, localeName,
         htmlFilePath):
    '''Our club ranking requires flight to originate locally, so airport must be in approved list'''
    nRecentTop = 15  # entries on the "recent" leaderboards
    nAllTop = 100  # entries on the all-time leaderboards
    flightsLocal = deepcopy(flights)
    # Zero out scores of flights from non-approved airports so they never
    # rank, while keeping array shape/order intact.
    for i in range(len(flightsLocal)):
        if not airportInList(flightsLocal[i]['airport'], airports):
            flightsLocal[i]['points'] = 0.0
            flightsLocal[i]['speed'] = 0.0
    # Field name -> human-readable column header for the HTML tables.
    headerDict = {
        'name': 'Name',
        'localFlightDateStr': 'Date',
        'points': 'Points km',
        'pointsGain': 'Increased km',
        'speed': 'Speed (pts/hr)',
        'speedGain': 'Increased pts/hr',
        'glider': 'Glider',
        'airport': 'Airport',
        'duration': 'Duration',
        'url': 'Track'
    }
    '''All time'''
    topDistanceFlights = sort(flightsLocal, order=['points'])[::-1][:nAllTop]
    # Speed ranking only considers flights longer than one hour.
    topSpeedFlights = sort(flightsLocal[flightsLocal['duration'] > 1.0],
                           order=['speed'])[::-1][:nAllTop]
    note = 'From WeGlide, OnLineContest, and SkyLines. The sites have very similar scoring systems, called "Free", "OLC+" and "Score" respectively, and generally agree within 1-2 percent.'
    fields = [
        'name', 'points', 'localFlightDateStr', 'airport', 'duration',
        'speed', 'glider', 'url'
    ]
    writeRankAll(topDistanceFlights,
                 'Top {} {} Distance Flights'.format(nAllTop, localeName),
                 fields, headerDict, groupName, htmlFilePath, note)
    fields = [
        'name', 'speed', 'localFlightDateStr', 'airport', 'duration',
        'points', 'glider', 'url'
    ]
    writeRankAll(topSpeedFlights,
                 'Top {} {} Speed Flights'.format(nAllTop, localeName),
                 fields, headerDict, groupName, htmlFilePath, note)
    # Per-pilot totals over all local flights.
    pilots = list(set(flightsLocal['name']))
    totals = zeros(len(pilots),
                   dtype=[('name', unicode_, 100), ('points', float32),
                          ('duration', float32)])
    for i, pilot in enumerate(pilots):
        totals[i]['name'] = pilot
        # NOTE(review): this rebinds the ``flights`` parameter; harmless as
        # the original is no longer needed, but easy to misread.
        flights = flightsLocal[flightsLocal['name'] == pilot]
        totals[i]['points'] = sum(flights['points'])
        totals[i]['duration'] = sum(flights['duration'])
    totals = sort(totals, order=['points'])[::-1]
    fields = ['name', 'points', 'duration']
    writeTotals(totals, 'Totals All {} Flights'.format(localeName), fields,
                headerDict, groupName, googleGroupName, htmlFilePath, note)
    '''Recent:'''
    nrecentMonths = 24  # months
    flightsRecent = deepcopy(flightsLocal)
    # Zero out scores of flights older than the recent window (approximated
    # as 30-day months).
    for i in range(len(flightsRecent)):
        if date.fromisoformat(
                flightsRecent[i]['localFlightDateStr']
        ) < date.today() - timedelta(days=30 * nrecentMonths):
            flightsRecent[i]['points'] = 0.0
            flightsRecent[i]['speed'] = 0.0
    writeRankRecent(flightsRecent, nRecentTop, nrecentMonths, headerDict,
                    localeName, groupName, htmlFilePath, note)
    writePersonal(flightsRecent, flightsLocal, nrecentMonths, headerDict,
                  localeName, groupName, htmlFilePath, note)
def start_date(self): return date.fromisoformat(self.start)
def create_invoice_from_pretix(invoice, sender, order):
    """Create or update an electronic Invoice (and its Items) from a Pretix
    invoice/order pair.

    Orders with no confirmed payments are skipped. For Italian recipients,
    raises MissingTaxCodeError (business without VAT id) or
    MissingFiscalCodeError (individual without fiscal code).
    """
    invoice_date = date.fromisoformat(invoice["date"])
    confirmed_payment_providers = [payment["provider"] for payment in order["payments"]]
    # No confirmed payment -> nothing to invoice.
    if not confirmed_payment_providers:
        print(f'Skipping {invoice["number"]}')
        return
    # Use the most recent confirmed payment to pick the payment method.
    payment_provider = confirmed_payment_providers[-1]
    # TD04 = credit note (cancellation), TD01 = regular invoice.
    invoice_type = (
        INVOICE_TYPES.TD04 if invoice["is_cancellation"] else INVOICE_TYPES.TD01
    )
    # MP05 = bank transfer; everything else is treated as MP08 (card).
    payment_method = (
        PAYMENT_METHODS.MP05 if payment_provider == "banktransfer" else PAYMENT_METHODS.MP08
    )
    amount = sum([Decimal(line["gross_value"]) for line in invoice["lines"]])
    tax_amount = sum([get_tax(line) for line in invoice["lines"]])
    invoice_address = order["invoice_address"]
    # Pretix stores a single full-name field: first token is treated as the
    # first name, the remainder (possibly empty) as the last name.
    first_name, *last_name_parts = invoice_address["name"].split(" ")
    last_name = " ".join(last_name_parts)
    address, _ = Address.objects.get_or_create(
        address=invoice_address["street"],
        postcode=invoice_address["zipcode"],
        city=invoice_address["city"],
        province=invoice_address["state"],
        country_code=invoice_address["country"],
    )
    recipient_fiscal_code = ""
    tax_code = ""
    if invoice_address["country"].lower() == "it":
        # if we are sending invoices to an italian recipient
        # we need to check if they have a vat number
        if invoice_address["is_business"]:
            # in that case the recipient_code should be set on the order
            # and our tax_code becomes the VAT number
            recipient_code = ""  # TODO (internal reference field?)
            tax_code = invoice_address["vat_id"]
            if not tax_code:
                raise MissingTaxCodeError(order)
        else:
            # otherwise the recipient_code is 0000000
            # and our recipient_fiscal_code becomes the italian fiscal code
            recipient_code = "0000000"
            recipient_fiscal_code = invoice_address["internal_reference"]
            if not recipient_fiscal_code:
                raise MissingFiscalCodeError(order)
    else:
        # Foreign recipient: conventional placeholder codes.
        recipient_code = "XXXXXXX"
        tax_code = "99999999999"
    # Idempotent on invoice number per sender: re-running updates in place.
    invoice_object, created = Invoice.objects.update_or_create(
        sender=sender,
        invoice_number=invoice["number"],
        defaults={
            "invoice_type": invoice_type,
            "is_business": invoice_address["is_business"],
            "invoice_currency": "EUR",
            "invoice_date": invoice_date,
            "invoice_deadline": invoice_date + timedelta(days=30),
            # TODO: should be invoice["lines"][0]["tax_rate"] but hotels are broken
            "invoice_tax_rate": "22.00",
            "invoice_amount": amount,
            "invoice_tax_amount": tax_amount,
            "transmission_format": TRANSMISSION_FORMATS.FPR12,
            "payment_condition": PAYMENT_CONDITIONS.TP02,
            "payment_method": payment_method,
            "recipient_denomination": invoice_address.get("company") or "",
            "recipient_first_name": first_name,
            "recipient_last_name": last_name,
            "recipient_address": address,
            "recipient_tax_code": tax_code or recipient_fiscal_code,
            "recipient_code": recipient_code,
        },
    )
    # Items are recreated below, so drop the stale ones on update.
    if not created:
        invoice_object.items.all().delete()
    for line in invoice["lines"]:
        Item.objects.create(
            row=line["position"],
            description=line["description"],
            quantity=1,
            # prices need to be vat_excluded
            unit_price=float(line["gross_value"]) - get_tax(line),
            # TODO: should be line["tax_rate"] but hotels are broken
            vat_rate="22.00",
            invoice=invoice_object,
        )
def end_date(self): if self.end == 'today': return date.today() return date.fromisoformat(self.end)
def parse_dte_xml(xml_doc: XmlElement) -> data_models.DteDataL2:
    """
    Parse data from a DTE XML doc.

    .. warning::
        It is assumed that ``xml_doc`` is an ``{http://www.sii.cl/SiiDte}/DTE``
        XML element.

    :raises ValueError:
    :raises TypeError:
    :raises NotImplementedError:
    """
    # TODO: change response type to a dataclass like 'DteXmlData'.
    # TODO: separate the XML parsing stage from the deserialization stage, which could be
    #   performed by XML-agnostic code (perhaps using Marshmallow or data classes?).
    #   See :class:`cl_sii.rcv.parse.RcvCsvRowSchema`.
    if not isinstance(xml_doc, (XmlElement, XmlElementTree)):
        raise TypeError("'xml_doc' must be an 'XmlElement'.")
    xml_em = xml_doc

    ###########################################################################
    # XML elements finding
    ###########################################################################

    # Schema requires one, and only one, of these:
    #   a) 'Documento'  b) 'Liquidacion'  c) 'Exportaciones'
    documento_em = xml_em.find(
        'sii-dte:Documento',  # "Informacion Tributaria del DTE"
        namespaces=DTE_XMLNS_MAP)
    liquidacion_em = xml_em.find(
        'sii-dte:Liquidacion',  # "Informacion Tributaria de Liquidaciones"
        namespaces=DTE_XMLNS_MAP)
    exportaciones_em = xml_em.find(
        'sii-dte:Exportaciones',  # "Informacion Tributaria de exportaciones"
        namespaces=DTE_XMLNS_MAP)
    signature_em = xml_em.find(
        'ds:Signature',  # "Firma Digital sobre Documento"
        namespaces=xml_utils.XML_DSIG_NS_MAP)

    if liquidacion_em is not None or exportaciones_em is not None:
        raise NotImplementedError("XML element 'Documento' is the only one supported.")
    if documento_em is None:
        # Fix: the error message previously named the element 'Document';
        # the schema element (searched above) is 'Documento'.
        raise ValueError("Top level XML element 'Documento' is required.")

    # This value seems to be worthless (only useful for internal references in the XML doc).
    # e.g. 'MiPE76354771-13419', 'MiPE76399752-6048'
    # documento_em_id = documento_em.attrib['ID']

    # 'Documento'
    # Excluded optional elements (may be required by the SII depending on 'tipo_dte'
    # and other criteria): 'Detalle' (0..60), 'SubTotInfo' (0..20),
    # 'DscRcgGlobal' (0..20), 'Referencia' (0..40), 'Comisiones' (0..20).
    encabezado_em = documento_em.find(
        'sii-dte:Encabezado',  # "Identificacion y Totales del Documento"
        namespaces=DTE_XMLNS_MAP)
    # note: excluded because currently it is not useful.
    # ted_em = documento_em.find(
    #     'sii-dte:TED',  # "Timbre Electronico de DTE"
    #     namespaces=DTE_XMLNS_MAP)
    tmst_firma_em = documento_em.find(
        'sii-dte:TmstFirma',  # "Fecha y Hora en que se Firmo Digitalmente el Documento"
        namespaces=DTE_XMLNS_MAP)

    # 'Documento.Encabezado'
    # Excluded optional elements: 'RUTMandante', 'RUTSolicita', 'Transporte', 'OtraMoneda'.
    id_doc_em = encabezado_em.find(
        'sii-dte:IdDoc',  # "Identificacion del DTE"
        namespaces=DTE_XMLNS_MAP)
    emisor_em = encabezado_em.find(
        'sii-dte:Emisor',  # "Datos del Emisor"
        namespaces=DTE_XMLNS_MAP)
    receptor_em = encabezado_em.find(
        'sii-dte:Receptor',  # "Datos del Receptor"
        namespaces=DTE_XMLNS_MAP)
    totales_em = encabezado_em.find(
        'sii-dte:Totales',  # "Montos Totales del DTE"
        namespaces=DTE_XMLNS_MAP)

    # 'Documento.Encabezado.IdDoc'
    # Excluded optional elements: 'IndNoRebaja', 'TipoDespacho', 'IndTraslado',
    # 'TpoImpresion', 'IndServicio', 'MntBruto', 'TpoTranCompra', 'TpoTranVenta',
    # 'FmaPago', 'FmaPagExp', 'FchCancel', 'MntCancel', 'SaldoInsol',
    # 'MntPagos' (0..30), 'PeriodoDesde', 'PeriodoHasta', 'MedioPago',
    # 'TpoCtaPago', 'NumCtaPago', 'BcoPago', 'TermPagoCdg', 'TermPagoGlosa',
    # 'TermPagoDias'.
    # (required):
    tipo_dte_em = id_doc_em.find(
        'sii-dte:TipoDTE',  # "Tipo de DTE"
        namespaces=DTE_XMLNS_MAP)
    folio_em = id_doc_em.find(
        'sii-dte:Folio',  # "Folio del Documento Electronico"
        namespaces=DTE_XMLNS_MAP)
    fecha_emision_em = id_doc_em.find(
        'sii-dte:FchEmis',  # "Fecha Emision Contable del DTE"
        namespaces=DTE_XMLNS_MAP)
    # (optional):
    fecha_vencimiento_em = id_doc_em.find(
        'sii-dte:FchVenc',  # "Fecha de Vencimiento del Pago"
        namespaces=DTE_XMLNS_MAP)

    # 'Documento.Encabezado.Emisor'
    # Excluded optional elements: 'Telefono' (0..2), 'Acteco' (0..4), 'GuiaExport',
    # 'Sucursal', 'CdgSIISucur', 'DirOrigen', 'CmnaOrigen', 'CiudadOrigen',
    # 'CdgVendedor', 'IdAdicEmisor'.
    # (required):
    emisor_rut_em = emisor_em.find(
        'sii-dte:RUTEmisor',  # "RUT del Emisor del DTE"
        namespaces=DTE_XMLNS_MAP)
    emisor_razon_social_em = emisor_em.find(
        'sii-dte:RznSoc',  # "Nombre o Razon Social del Emisor"
        namespaces=DTE_XMLNS_MAP)
    emisor_giro_em = emisor_em.find(
        'sii-dte:GiroEmis',  # "Giro Comercial del Emisor Relevante para el DTE"
        namespaces=DTE_XMLNS_MAP)
    # (optional):
    emisor_email_em = emisor_em.find(
        'sii-dte:CorreoEmisor',  # "Correo Elect. de contacto en empresa del receptor" (wrong!)
        namespaces=DTE_XMLNS_MAP)

    # 'Documento.Encabezado.Receptor'
    # Excluded optional elements: 'CdgIntRecep', 'Extranjero', 'GiroRecep',
    # 'Contacto', 'CorreoRecep', 'DirRecep', 'CmnaRecep', 'CiudadRecep',
    # 'DirPostal', 'CmnaPostal', 'CiudadPostal'.
    # (required):
    receptor_rut_em = receptor_em.find(
        'sii-dte:RUTRecep',  # "RUT del Receptor del DTE"
        namespaces=DTE_XMLNS_MAP)
    receptor_razon_social_em = receptor_em.find(
        'sii-dte:RznSocRecep',  # "Nombre o Razon Social del Receptor"
        namespaces=DTE_XMLNS_MAP)
    # (optional):
    # Bug fix: 'CorreoRecep' is a child of 'Receptor'; it was previously
    # searched under 'Emisor', so the receiver's email was never found.
    receptor_email_em = receptor_em.find(
        'sii-dte:CorreoRecep',  # "Correo Elect. de contacto en empresa del receptor"
        namespaces=DTE_XMLNS_MAP)

    # 'Documento.Encabezado.Totales'
    # Excluded optional elements: 'MntNeto', 'MntExe', 'MntBase', 'MntMargenCom',
    # 'TasaIVA', 'IVA', 'IVAProp', 'IVATerc', 'ImptoReten' (0..20), 'IVANoRet',
    # 'CredEC', 'GrntDep', 'Comisiones', 'MontoNF', 'MontoPeriodo',
    # 'SaldoAnterior', 'VlrPagar'.
    monto_total_em = totales_em.find(
        'sii-dte:MntTotal',  # "Monto Total del DTE"
        namespaces=DTE_XMLNS_MAP)

    # 'Signature'
    # note: 'SignedInfo' and its children are currently not used, so they are not retrieved.
    signature_signature_value_em = signature_em.find(
        'ds:SignatureValue',  # "Valor de la Firma Digital"
        namespaces=xml_utils.XML_DSIG_NS_MAP)
    signature_key_info_em = signature_em.find(
        'ds:KeyInfo',  # "Informacion de Claves Publicas y Certificado"
        namespaces=xml_utils.XML_DSIG_NS_MAP)
    signature_key_info_x509_data_em = signature_key_info_em.find(
        'ds:X509Data',  # "Informacion del Certificado Publico"
        namespaces=xml_utils.XML_DSIG_NS_MAP)
    signature_key_info_x509_cert_em = signature_key_info_x509_data_em.find(
        'ds:X509Certificate',  # "Certificado Publico"
        namespaces=xml_utils.XML_DSIG_NS_MAP)

    ###########################################################################
    # values parsing
    ###########################################################################

    tipo_dte_value = constants.TipoDteEnum(int(_text_strip_or_raise(tipo_dte_em)))
    folio_value = int(_text_strip_or_raise(folio_em))
    fecha_emision_value = date.fromisoformat(_text_strip_or_raise(fecha_emision_em))
    fecha_vencimiento_value = None
    if fecha_vencimiento_em is not None:
        fecha_vencimiento_value = date.fromisoformat(
            _text_strip_or_raise(fecha_vencimiento_em))

    emisor_rut_value = Rut(_text_strip_or_raise(emisor_rut_em))
    emisor_razon_social_value = _text_strip_or_raise(emisor_razon_social_em)
    emisor_giro_value = _text_strip_or_raise(emisor_giro_em)
    emisor_email_value = None
    if emisor_email_em is not None:
        emisor_email_value = _text_strip_or_none(emisor_email_em)

    receptor_rut_value = Rut(_text_strip_or_raise(receptor_rut_em))
    receptor_razon_social_value = _text_strip_or_raise(receptor_razon_social_em)
    receptor_email_value = None
    if receptor_email_em is not None:
        receptor_email_value = _text_strip_or_none(receptor_email_em)

    monto_total_value = int(_text_strip_or_raise(monto_total_em))

    # The XML timestamp is naive; localize it to the DTE data model's timezone.
    tmst_firma_value = tz_utils.convert_naive_dt_to_tz_aware(
        dt=datetime.fromisoformat(_text_strip_or_raise(tmst_firma_em)),
        tz=data_models.DteDataL2.DATETIME_FIELDS_TZ)

    signature_signature_value = encoding_utils.decode_base64_strict(
        _text_strip_or_raise(signature_signature_value_em))
    signature_key_info_x509_cert_der = encoding_utils.decode_base64_strict(
        _text_strip_or_raise(signature_key_info_x509_cert_em))

    return data_models.DteDataL2(
        emisor_rut=emisor_rut_value,
        tipo_dte=tipo_dte_value,
        folio=folio_value,
        fecha_emision_date=fecha_emision_value,
        receptor_rut=receptor_rut_value,
        monto_total=monto_total_value,
        emisor_razon_social=emisor_razon_social_value,
        receptor_razon_social=receptor_razon_social_value,
        fecha_vencimiento_date=fecha_vencimiento_value,
        firma_documento_dt=tmst_firma_value,
        signature_value=signature_signature_value,
        signature_x509_cert_der=signature_key_info_x509_cert_der,
        emisor_giro=emisor_giro_value,
        emisor_email=emisor_email_value,
        receptor_email=receptor_email_value,
    )
from statements.income_statement import IncomeStatement from cc.categories import * from tests.mocks.liabilities.timed_liability_mock import * from tests.mocks.cc.statement_mock import * from tests.mocks.paystubs.wages_mock import * from pytest import fixture, mark from decimal import Decimal from datetime import date @fixture(params=[(date.fromisoformat('1970-01-01'), date.fromisoformat('1971-07-31'))]) def income_statement(request): return IncomeStatement(beginning=request.param[0], ending=request.param[1]) class TestIncomeStatement: def test_add_paystub(self, income_statement): paystub = paystub_mock('1970-01-01', '1970-02-01') paystub.earnings = earnings_mock(wages=100, bonuses=20) paystub.deductions = deductions_mock(total=50) paystub.taxes = taxes_mock(total=10) income_statement.add_paystub(paystub) assert 100 == income_statement.revenue.salaries assert 20 == income_statement.revenue.bonuses assert -50 == income_statement.expenses.ebit.deductions assert -10 == income_statement.expenses.it.taxes def test_add_paystub_date_out_of_range(self, income_statement): earnings = earnings_mock(wages=100, bonuses=20)
from multiDB import TableManager from datetime import date stocks = open("stocks.txt").read().split(",") tb = TableManager("TSLA") tb.deleteFromSaisonal() for stock in stocks: tb = TableManager(stock) latestDate = date.fromisoformat(tb.getLatestDate()) year = latestDate.year month = latestDate.month - 1 while year != latestDate.year - 10 or month != latestDate.month: monthData = tb.getDataForRanking(str(month), str(year)) diff = monthData[-1][1] - monthData[0][1] gain = diff / monthData[0][1] tb.insertSaisonal(tb.getSymbol(), year, month, gain) if month == 1: year -= 1 month = 12 else: month -= 1 tb.commit()
def _forward_indices(indices: list, window: int): ''' Adds `window` indices to a list of dates ''' date_indices = [date.fromisoformat(idx) for idx in indices] for _ in range(window): date_indices.append(date_indices[-1] + timedelta(days=1)) return [idx.isoformat() for idx in date_indices]
# date from timestamp import time t = time.time() print(t) d = date.fromtimestamp(t) print(d) d = date.fromtimestamp(1537261418) print(d) # date from ordinal d = date.fromordinal(366) print(d) # date from ISO string format, added in Python 3.7 d = date.fromisoformat('2018-09-19') print(d) # date class attributes print(date.min) print(date.max) print(date.resolution) # instance attributes, read only d = date.today() print(d.year) print(d.month) print(d.day) # date operations with timedelta date_tomorrow = date.today() + timedelta(days=1)
async def _parse_source_responses(self, responses: SourceResponses) -> SourceMeasurement: """Extend to calculate how many days ago the jobs were built.""" measurement = await super()._parse_source_responses(responses) build_dates = [entity["build_date"] for entity in measurement.entities if entity["build_date"]] measurement.value = str((date.today() - date.fromisoformat(max(build_dates))).days) if build_dates else None return measurement
parser = ArgumentParser(description="Weekly earnings calculator") parser.add_argument("-d", "--date", metavar="IS08601_DATE", help="First day of the billing cycle") parser.add_argument("-e", "--entry", metavar="H1:M1,M2", dest="entries", help="hour1:min1,min2", action="append") parser.add_argument("-r", "--rate", type=Decimal, help="Hourly rate (overrides the RATE env var)") args = parser.parse_args() weekly_earnings = Decimal(0) total_time_tracked_in_mins = 0 dt = args.date one_day = timedelta(days=1) if dt: dt = date.fromisoformat(dt) else: dt = date.today() while dt.weekday(): # 0=Monday dt -= one_day hr_rate = args.rate or Decimal(os.environ.get('RATE', DEF_HR_RATE)) for day_entries in args.entries: day_mins = 0 for entry in day_entries.split(ENTRY_SEP): for match in ENTRY_RE.finditer(entry): h_or_m, m = match.group(1, 2) if m: day_mins += int(h_or_m)*60 + int(m) else:
# Use datetime.date.today() # datetime.date class has the following integer attributes, date(year, month, day) # get day of the week using date.weekday() # Monday is 0 from datetime import date d1 = date.today() print(d1) print(d1.month, d1.day, d1.year) print(d1.weekday()) # ISO format is a string format, yyyy-mm-dd # --------------------------- # date_object.isoformat() does the same thing as str(date_object) from datetime import date d1 = date.fromisoformat('2011-11-23') print(d1) print(str(d1)) print(d1.isoformat()) d1 # Comparison, addition and sutraction of dates # --------------------------- # Comparison gives boolean result. Later date is greater than earlier date. # Date addition & subtraction give result as a datetime.timedelta object (explained more below). # The same comparison and add/subtract operations can be used with time objects. from datetime import date d1 = date.today() d2 = date(2015, 5, 14) print(d1 > d2)