def cron_courts():
    """Import the list of courts and rebuild their reporting hierarchy.

    Downloads the court-list page, creates the two supreme courts plus every
    court found in the page's two select boxes, then re-links each district
    court to its superior (regional) court via the ``reports`` field.
    Network/parsing failures are logged and skipped so the cron run continues.
    """
    try:
        res = get(ROOT_URL + LIST_COURTS)
        soup = BeautifulSoup(res.text, 'html.parser')
        # The two supreme courts are not listed on the page; add them explicitly.
        Court.objects.get_or_create(id=SUPREME_COURT, name='Nejvyšší soud')
        Court.objects.get_or_create(id=SUPREME_ADMINISTRATIVE_COURT, name='Nejvyšší správní soud')
        # Skip the first <option> of each select box (placeholder entry).
        upper = soup.find(id='kraj').find_all('option')[1:]
        lower = soup.find(id='soudy').find_all('option')[1:]
        for court in upper + lower:
            Court.objects.get_or_create(id=court['value'], name=court.string.encode('utf-8'))
    except Exception:  # pragma: no cover
        # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt propagate.
        LOGGER.warning('Error importing courts')
    # Rebuild the hierarchy from scratch.
    Court.objects.all().update(reports=None)
    for court in Court.objects.all():
        if isreg(court):
            try:
                sleep(1)  # throttle requests to the remote server
                res = get(ROOT_URL + LIST_REPORTS.format(court.pk))
                soup = BeautifulSoup(res.text, 'xml')
                for item in soup.find_all('okresniSoud'):
                    Court.objects.filter(pk=item.id.string).update(reports=court)
            except Exception:  # pragma: no cover
                LOGGER.warning('Error setting hierarchy for {}'.format(court.id))
    LOGGER.info('Courts imported')
def cron_run():
    """Cron dispatcher: purge stale locks, run queued jobs, then scheduled jobs.

    NOTE(review): uses naive local time via datetime.now() — confirm the
    deployment runs in a single, fixed timezone.
    """
    now = datetime.now()
    # Drop locks older than EXPIRE — their owners presumably crashed.
    expired = Lock.objects.filter(timestamp_add__lt=(now - EXPIRE))
    for lock in expired:
        LOGGER.warning('Expired lock "{}" deleted'.format(lock.name))
    expired.delete()
    # Run queued (previously blocked) jobs, oldest first.
    for job in Pending.objects.order_by('timestamp_add'):
        # The row may have been deleted meanwhile; re-check before running.
        if not Pending.objects.filter(pk=job.id).exists():
            continue
        lock = job.lock
        if not Lock.objects.filter(name=lock).exists():
            job.delete()
            Lock.objects.get_or_create(name=lock)
            args = getattr(job, 'args', '')
            run(job.name, args)
            Lock.objects.filter(name=lock).delete()
            LOGGER.debug(
                'Scheduled job {} with arguments "{}" completed'.format(
                    job.name, args))
    # Run jobs whose schedule predicate matches the current time.
    for job in SCHED:
        if job['when'](now):
            args = job.get('args', '')
            if 'lock' in job:
                lock = job['lock']
                if Lock.objects.filter(name=lock).exists():
                    if LOG_LOCKS:
                        LOGGER.debug('Lock "{}" exists'.format(lock))
                    if job['blocking']:
                        # Queue the job so it runs once the lock is released.
                        Pending(name=job['name'], args=args, lock=lock).save()
                        LOGGER.debug(
                            'Job {} with arguments "{}" scheduled'.format(
                                job['name'], args))
                    continue
                elif LOG_LOCKS:
                    LOGGER.debug('Lock "{}" does not exist'.format(lock))
                Lock.objects.get_or_create(name=lock)
                if LOG_LOCKS:
                    LOGGER.debug('Lock "{}" set'.format(lock))
            run(job['name'], args)
            if 'lock' in job:
                reslock = Lock.objects.filter(name=lock)
                if reslock.exists():
                    reslock.delete()
                    if LOG_LOCKS:
                        LOGGER.debug('Lock "{}" reset'.format(lock))
def cron_run():
    """Cron dispatcher: purge stale locks, run queued jobs, then scheduled jobs.

    NOTE(review): uses naive local time via datetime.now() — confirm the
    deployment runs in a single, fixed timezone.
    """
    now = datetime.now()
    # Drop locks older than EXPIRE — their owners presumably crashed.
    expired = Lock.objects.filter(timestamp_add__lt=(now - EXPIRE))
    for lock in expired:
        LOGGER.warning('Expired lock "{}" deleted'.format(lock.name))
    expired.delete()
    # Run queued (previously blocked) jobs, oldest first.
    for job in Pending.objects.order_by('timestamp_add'):
        # The row may have been deleted meanwhile; re-check before running.
        if not Pending.objects.filter(pk=job.id).exists():
            continue
        lock = job.lock
        if not Lock.objects.filter(name=lock).exists():
            job.delete()
            Lock.objects.get_or_create(name=lock)
            args = getattr(job, 'args', '')
            run(job.name, args)
            Lock.objects.filter(name=lock).delete()
            LOGGER.debug('Scheduled job {} with arguments "{}" completed'.format(job.name, args))
    # Run jobs whose schedule predicate matches the current time.
    for job in SCHED:
        if job['when'](now):
            args = job.get('args', '')
            if 'lock' in job:
                lock = job['lock']
                if Lock.objects.filter(name=lock).exists():
                    if LOG_LOCKS:
                        LOGGER.debug('Lock "{}" exists'.format(lock))
                    if job['blocking']:
                        # Queue the job so it runs once the lock is released.
                        Pending(
                            name=job['name'],
                            args=args,
                            lock=lock
                        ).save()
                        LOGGER.debug('Job {} with arguments "{}" scheduled'.format(job['name'], args))
                    continue
                elif LOG_LOCKS:
                    LOGGER.debug('Lock "{}" does not exist'.format(lock))
                Lock.objects.get_or_create(name=lock)
                if LOG_LOCKS:
                    LOGGER.debug('Lock "{}" set'.format(lock))
            run(job['name'], args)
            if 'lock' in job:
                reslock = Lock.objects.filter(name=lock)
                if reslock.exists():
                    reslock.delete()
                    if LOG_LOCKS:
                        LOGGER.debug('Lock "{}" reset'.format(lock))
def cron_courtrooms():
    """Download the courtroom list for every court except the SAC.

    The Supreme Administrative Court is excluded (it has no courtroom feed;
    see cron_update2).  A failure for one court is logged and the remaining
    courts are still processed.
    """
    for court in Court.objects.exclude(id=SUPREME_ADMINISTRATIVE_COURT):
        try:
            sleep(1)  # throttle requests to the remote server
            res = get(LIST_COURTROOMS.format(court.pk))
            soup = BeautifulSoup(res.text, 'xml')
            for room in soup.find_all('jednaciSin'):
                croom, croomc = Courtroom.objects.get_or_create(
                    court=court, desc=room.nazev.string)
                if not croomc:
                    # Already existed: re-save, presumably to refresh an
                    # auto-updated timestamp — TODO confirm against the model.
                    croom.save()
        except Exception:  # pragma: no cover
            # Narrowed from a bare "except:" so system exits propagate.
            LOGGER.warning('Error downloading courtrooms')
    LOGGER.info('Courtrooms downloaded')
def cron_find():
    """Try to fetch the anonymized text of one recent decision.

    Picks the least-recently-checked decision without an anonymized file,
    replays the search form on the court server, and if an anonymized
    document link is found, downloads and stores the file.  Any failure
    (including no matching decision) is treated as best-effort and logged.
    """
    now = datetime.now()
    try:
        # Least recently checked decision within the observation window.
        dec = Decision.objects.filter(anonfilename='', date__gte=(now - OBS)).earliest('updated')
        dec.updated = now
        dec.save()
        res = get(FIND_URL)
        soup = BeautifulSoup(res.text, 'html.parser')
        form = soup.find('form')
        # Collect the hidden ASP.NET form fields (view state etc.).
        dct = {
            i['name']: i['value'] for i in form.find_all('input')
            if i['type'] == 'hidden' and i.has_attr('value')}
        # Build the file reference, e.g. "7 Afs 123/2016".
        ref = ('{} '.format(dec.senate) if dec.senate else '')
        ref += '{0.register} {0.number:d}/{0.year:d}'.format(dec)
        dct['_ctl0:ContentPlaceMasterPage:_ctl0:txtDatumOd'] = dct['_ctl0:ContentPlaceMasterPage:_ctl0:txtDatumDo'] = \
            '{0.day:02d}.{0.month:02d}.{0.year:d}'.format(dec.date)
        dct['_ctl0:ContentPlaceMasterPage:_ctl0:txtSpisovaZnackaFull'] = ref
        dct['_ctl0_ContentPlaceMasterPage__ctl0_rbTypDatum_0'] = 'on'
        res = post(FIND_URL, dct)
        soup = BeautifulSoup(res.text, 'html.parser')
        for anchor in soup.select('table#_ctl0_ContentPlaceMasterPage__ctl0_grwA')[0].select('a[title^=Anonymizovan]'):
            fileurl = anchor['href']
            filename = fileurl.split('/')[-1]
            if not FRE.match(filename):
                continue
            res = get(ROOT_URL + fileurl)
            if not res.ok:
                continue
            LOGGER.info(
                'Writing anonymized decision "{}"'
                .format(composeref(dec.senate, dec.register, dec.number, dec.year)))
            with open(join(REPO_PREF, filename), 'wb') as outfile:
                if not outfile.write(res.content):  # pragma: no cover
                    LOGGER.error(
                        'Failed to write anonymized decision "{}"'
                        .format(composeref(dec.senate, dec.register, dec.number, dec.year)))
                    return
            adddoc(APP, filename, ROOT_URL + fileurl)
            dec.anonfilename = filename
            dec.save()
            return
    except Exception:  # pragma: no cover
        # Narrowed from a bare "except:"; still catches Decision.DoesNotExist.
        LOGGER.warning('Find failed')
def updateproc(proc):
    """Re-check one tracked proceedings record against the court server.

    Returns True on success and False when the scrape failed; returns None
    early when an SAC auxiliary id cannot be resolved.  Sets ``proc.notify``
    when a change is detected on an already-known record (``notnew``).
    """
    notnew = bool(proc.updated)  # False only on the very first check
    proc.updated = datetime.now()
    proc.save()
    court = proc.court_id
    try:
        if court == SUPREME_ADMINISTRATIVE_COURT:
            # The SAC is queried via an auxiliary id resolved on demand.
            addauxid(proc)
            if not proc.auxid:
                return
            url = NSS_GET_PROC.format(proc.auxid)
            res = get(url)
            soup = BeautifulSoup(res.text, 'html.parser')
            table = soup.find('table', 'frm')
        else:
            court_type = 'ns' if court == SUPREME_COURT else 'os'
            url = ROOT_URL + GET_PROC.format(
                court,
                proc.court.reports.id if proc.court.reports else proc.court.id,
                proc.senate,
                quote(proc.register.upper()),
                proc.number,
                proc.year,
                court_type)
            res = get(url)
            soup = BeautifulSoup(res.text, 'html.parser')
            table = soup.find('tr', 'AAAA')
        assert table
    except:  # pragma: no cover
        LOGGER.warning(
            'Failed to check proceedings "{0.desc}" ({1}) for user "{2}" ({0.uid_id:d})'
            .format(proc, p2s(proc), User.objects.get(pk=proc.uid_id).username))
        return False
    # Fingerprint of the scraped fragment; any difference means a change.
    hsh = md5(str(table).encode()).hexdigest()
    if court != SUPREME_ADMINISTRATIVE_COURT:
        changed = None
        try:
            # Last-change timestamp from the page, "d.m.yyyy h:m" split into 4 tokens.
            tbl = table.find_next_sibling().find_next_sibling().table.tr.td.find_next_sibling().text.split()
            if len(tbl) == 4:
                changed = datetime(*map(int, list(reversed(tbl[0].split('.'))) + tbl[1].split(':')))
        except:  # pragma: no cover
            LOGGER.warning(
                'Failed to check proceedings "{0.desc}" ({1}) for user "{2}" ({0.uid_id:d})'
                .format(proc, p2s(proc), User.objects.get(pk=proc.uid_id).username))
        if changed != proc.changed or hsh != proc.hash:
            proc.notify |= notnew
            if changed:
                proc.changed = changed
            LOGGER.info(
                'Change detected in proceedings "{0.desc}" ({1}) for user "{2}" ({0.uid_id:d})'
                .format(proc, p2s(proc), User.objects.get(pk=proc.uid_id).username))
    elif hsh != proc.hash:
        # The SAC page carries no timestamp; rely on the hash alone.
        proc.notify |= notnew
        if notnew:
            proc.changed = proc.updated
        if proc.changed:
            LOGGER.info(
                'Change detected in proceedings "{0.desc}" ({1}) for user "{2}" ({0.uid_id:d})'
                .format(proc, p2s(proc), User.objects.get(pk=proc.uid_id).username))
    proc.hash = hsh
    LOGGER.debug(
        'Proceedings "{0.desc}" ({1}) updated for user "{2}" ({0.uid_id:d})'
        .format(proc, p2s(proc), User.objects.get(pk=proc.uid_id).username))
    return True
def cron_update():
    """Download hearings for the oldest pending Task (one court + date).

    Consumes a single Task per run; the Task is deleted only after every
    courtroom of its court was processed without an outer failure.
    """
    tasks = Task.objects.all()
    if not tasks.exists():
        return
    task = tasks.earliest('timestamp_update')
    task.save()  # bump the timestamp so the same task is not picked again next
    court0 = 'os'
    if task.court.reports:
        # District court: query via its superior (regional) court.
        court1 = task.court.reports.id
        court2 = task.court.id
    else:
        court1 = task.court.id
        court2 = ''
    tdate = str(task.date)
    try:
        for croom in Courtroom.objects.filter(court=task.court):
            query = QueryDict(mutable=True)
            query['type'] = 'jednani'
            query['typSoudu'] = court0
            query['krajOrg'] = court1
            query['org'] = court2
            query['sin'] = croom.desc
            query['datum'] = '{0.day:d}.{0.month:d}.{0.year:d}'.format(task.date)
            query['spamQuestion'] = '23'
            query['druhVec'] = ''
            url = ROOT_URL + GET_HEARINGS + query.urlencode()
            sleep(1)  # throttle requests to the remote server
            res = get(url)
            soup = BeautifulSoup(res.text, 'html.parser')
            sched = soup.select('table tr td + td table tr td table tr')[6]
            if sched.select('b'):
                # A <b> element here marks "no hearings" for this courtroom.
                continue
            for ttr in sched.td.table.children:
                try:
                    # Walk the cells of one hearing row, left to right.
                    ttd = ttr.td
                    ttm = ttd.text.split(':')
                    ttm = datetime(
                        task.date.year, task.date.month, task.date.day,
                        int(ttm[0]), int(ttm[1]))
                    ttd = ttd.find_next_sibling('td')
                    senate, register, number, year = decomposeref(ttd.text.replace(' / ', '/'))
                    register = normreg(register)
                    ttd = ttd.find_next_sibling('td')
                    form = Form.objects.get_or_create(name=ttd.text.strip())[0]
                    ttd = ttd.find_next_sibling('td')
                    judge = Judge.objects.get_or_create(name=ttd.text.strip())[0]
                    ttd = ttd.find_next_sibling('td')
                    parties = ttd.select('td')
                    ttd = ttd.find_next_sibling('td')
                    closed = 'Ano' in ttd.text
                    ttd = ttd.find_next_sibling('td')
                    cancelled = 'Ano' in ttd.text
                    hearing = Hearing.objects.update_or_create(
                        courtroom=croom,
                        time=ttm,
                        senate=senate,
                        register=register,
                        number=number,
                        year=year,
                        form=form,
                        judge=judge,
                        defaults={
                            'closed': closed,
                            'cancelled': cancelled})
                    if hearing[1]:
                        # Newly created: attach parties and run watch checks.
                        for query in parties:
                            qts = query.text.strip()
                            if qts:
                                party = Party.objects.get_or_create(name=query.text.strip())[0]
                                hearing[0].parties.add(party)
                                sur_check(
                                    {'check_psj': True},
                                    qts, task.court, senate, register, number, year,
                                    HEARING_URL.format(
                                        task.court.id, senate, quote(register), number, year,
                                        tdate, tdate))
                except:
                    # Non-data rows (headers, separators) fail parsing; skip them.
                    pass
        task.delete()
    except:
        LOGGER.warning(
            'Failed to download hearings for {0}, {1.year:d}-{1.month:02d}-{1.day:02d}'
            .format(task.court_id, task.date))
        return
    LOGGER.debug(
        'Downloaded hearings for {0}, {1.year:d}-{1.month:02d}-{1.day:02d}'.format(
            task.court_id, task.date))
def cron_update2():
    """Download Supreme Administrative Court hearings.

    The SAC publishes no courtroom feed, so all of its hearings are filed
    under a single placeholder courtroom "(neuvedeno)".
    """
    nss = Court.objects.get(pk=SUPREME_ADMINISTRATIVE_COURT)
    croom = Courtroom.objects.get_or_create(court=nss, desc='(neuvedeno)')[0]
    form = Form.objects.get_or_create(name='Veřejné jednání')[0]
    try:
        res = get(LIST_COURTROOMS2)
        soup = BeautifulSoup(res.text, 'html.parser')
        for item in soup.select('table.item'):
            # Backfill auxiliary ids for hearings that still lack one.
            for hearing in Hearing.objects.filter(courtroom__court=nss, auxid=0):
                hearing.auxid = getauxid(hearing.senate, hearing.register, hearing.number, hearing.year)
                hearing.save()
            try:
                senate = register = number = year = judge = ttm = None
                parties = []
                for trow in item.select('tr'):
                    ths = trow.th.text.strip()
                    tds = trow.td.text.strip()
                    if ths.startswith('Spisová značka:'):
                        senate, register, number, year = decomposeref(tds)
                    elif ths.startswith('Účastníci řízení:'):
                        for query in trow.td:
                            # Plain text nodes (those with .strip) are party names.
                            if 'strip' in dir(query):
                                party = Party.objects.get_or_create(name=query.strip())[0]
                                parties.append(party)
                    elif ths.startswith('Předseda senátu:'):
                        judge = Judge.objects.get_or_create(name=tds)[0]
                    elif ths.startswith('Datum jednání:'):
                        # "d.m.yyyy <weekday> h:m" — date and time tokens.
                        dtm = tds.split()
                        dat = list(map(int, dtm[0].split('.')))
                        tim = list(map(int, dtm[2].split(':')))
                        ttm = datetime(dat[2], dat[1], dat[0], tim[0], tim[1])
                auxid = getauxid(senate, register, number, year)
                hearing = Hearing.objects.update_or_create(
                    courtroom=croom,
                    time=ttm,
                    senate=senate,
                    register=register,
                    number=number,
                    year=year,
                    form=form,
                    judge=judge,
                    closed=False,
                    cancelled=False,
                    auxid=auxid)
                if hearing[1]:
                    # Newly created: attach parties and run watch checks.
                    for party in parties:
                        hearing[0].parties.add(party)
                        sur_check(
                            {'check_psj': True},
                            party.name, nss, senate, register, number, year,
                            HEARING_URL.format(
                                nss.id, senate, quote(register), number, year,
                                ttm.date(), ttm.date()))
            except:  # pragma: no cover
                # Best-effort per item; malformed entries are skipped.
                pass
    except:  # pragma: no cover
        LOGGER.warning('Supreme Administrative Court update failed')
    LOGGER.debug('Downloaded Supreme Administrative Court hearings')
def cron_update():
    """Download new abridged SAC decisions, paging through the result list."""
    nss = Court.objects.get(pk=SUPREME_ADMINISTRATIVE_COURT)
    try:
        res = get(FORM_URL)
        soup = BeautifulSoup(res.text, 'html.parser')
        form = soup.find('form')
        # Collect the hidden ASP.NET form fields (view state etc.).
        dct = {
            i['name']: i['value'] for i in form.find_all('input')
            if i['type'] == 'hidden' and i.has_attr('value')}
        while True:
            # Sort field 5, direction 1 — presumably date descending; verify.
            dct['_ctl0:ContentPlaceMasterPage:_ctl0:ddlSortName'] = '5'
            dct['_ctl0:ContentPlaceMasterPage:_ctl0:ddlSortDirection'] = '1'
            res = post(FORM_URL, dct)
            soup = BeautifulSoup(res.text, 'html.parser')
            for item in soup.select('table.item'):
                try:
                    ttr = item.select('tr')
                    senate, register, number, year, page = decomposeref(ttr[0].td.text.strip())
                    if Decision.objects.filter(
                            senate=senate,
                            register=register,
                            number=number,
                            year=year,
                            page=page).exists():
                        # Already imported.
                        continue
                    fileurl = ttr[4].a['href']
                    filename = fileurl.split('/')[-1]
                    if not FRE.match(filename):
                        continue
                    res = get(ROOT_URL + fileurl)
                    if not res.ok:
                        continue
                    LOGGER.info('Writing abridged decision "{}"'.format(composeref(senate, register, number, year)))
                    with open(join(REPO_PREF, filename), 'wb') as outfile:
                        if not outfile.write(res.content):  # pragma: no cover
                            LOGGER.error(
                                'Failed to write abridged decision "{}"'
                                .format(composeref(senate, register, number, year)))
                            continue
                    adddoc(APP, filename, ROOT_URL + fileurl)
                    agenda = Agenda.objects.get_or_create(desc=ttr[2].td.text.strip())[0]
                    # Date cell is "d.m.yyyy"; reverse into (y, m, d).
                    dat = date(*map(int, list(reversed(ttr[3].td.text.split('.')))))
                    dec = Decision(
                        senate=senate,
                        register=register,
                        number=number,
                        year=year,
                        page=page,
                        agenda=agenda,
                        date=dat,
                        filename=filename)
                    dec.save()
                    for query in ttr[1].td:
                        # Plain text nodes (those with .strip) are party names.
                        if 'strip' in dir(query):
                            qstrip = query.strip()
                            party = Party.objects.get_or_create(name=qstrip)[0]
                            dec.parties.add(party)
                            sur_check(
                                {'check_udn': True},
                                qstrip, nss, senate, register, number, year,
                                DEC_URL.format(senate, quote(register), number, year, page))
                except:  # pragma: no cover
                    # Best-effort per item; malformed entries are skipped.
                    pass
            # Pagination: current page number is rendered as "[n]" in <b>.
            pagers = soup.select('div#PagingBox2')[0]
            cpag = int(pagers.b.text[1:-1])
            pager = pagers.select('a')
            if cpag > len(pager):
                # Last page reached.
                break
            form = soup.find('form')
            dct = {
                i['name']: i['value'] for i in form.find_all('input')
                if i['type'] == 'hidden' and i.has_attr('value')}
            # Simulate the ASP.NET postback link that selects the next page.
            dct['__EVENTTARGET'] = pager[cpag - 1]['href'][70:-34]
            dct['__EVENTARGUMENT'] = ''
    except:  # pragma: no cover
        LOGGER.warning('Update failed')
def cron_update():
    """Download hearings for the oldest pending Task (one court + date).

    Consumes a single Task per run; the Task is deleted only after every
    courtroom of its court was processed without an outer failure.
    """
    tasks = Task.objects.all()
    if not tasks.exists():
        return
    task = tasks.earliest('timestamp_update')
    task.save()  # bump the timestamp so the same task is not picked again next
    court0 = 'os'
    if task.court.reports:
        # District court: query via its superior (regional) court.
        court1 = task.court.reports.id
        court2 = task.court.id
    else:
        court1 = task.court.id
        court2 = ''
    tdate = str(task.date)
    try:
        for croom in Courtroom.objects.filter(court=task.court):
            query = QueryDict(mutable=True)
            query['type'] = 'jednani'
            query['typSoudu'] = court0
            query['krajOrg'] = court1
            query['org'] = court2
            query['sin'] = croom.desc
            query['datum'] = '{0.day:d}.{0.month:d}.{0.year:d}'.format(
                task.date)
            query['spamQuestion'] = '23'
            query['druhVec'] = ''
            url = ROOT_URL + GET_HEARINGS + query.urlencode()
            sleep(1)  # throttle requests to the remote server
            res = get(url)
            soup = BeautifulSoup(res.text, 'html.parser')
            sched = soup.select('table tr td + td table tr td table tr')[6]
            if sched.select('b'):
                # A <b> element here marks "no hearings" for this courtroom.
                continue
            for ttr in sched.td.table.children:
                try:
                    # Walk the cells of one hearing row, left to right.
                    ttd = ttr.td
                    ttm = ttd.text.split(':')
                    ttm = datetime(task.date.year, task.date.month, task.date.day,
                                   int(ttm[0]), int(ttm[1]))
                    ttd = ttd.find_next_sibling('td')
                    senate, register, number, year = decomposeref(
                        ttd.text.replace(' / ', '/'))
                    register = normreg(register)
                    ttd = ttd.find_next_sibling('td')
                    form = Form.objects.get_or_create(name=ttd.text.strip())[0]
                    ttd = ttd.find_next_sibling('td')
                    judge = Judge.objects.get_or_create(
                        name=ttd.text.strip())[0]
                    ttd = ttd.find_next_sibling('td')
                    parties = ttd.select('td')
                    ttd = ttd.find_next_sibling('td')
                    closed = 'Ano' in ttd.text
                    ttd = ttd.find_next_sibling('td')
                    cancelled = 'Ano' in ttd.text
                    hearing = Hearing.objects.update_or_create(
                        courtroom=croom,
                        time=ttm,
                        senate=senate,
                        register=register,
                        number=number,
                        year=year,
                        form=form,
                        judge=judge,
                        defaults={
                            'closed': closed,
                            'cancelled': cancelled
                        })
                    if hearing[1]:
                        # Newly created: attach parties and run watch checks.
                        for query in parties:
                            qts = query.text.strip()
                            if qts:
                                party = Party.objects.get_or_create(
                                    name=query.text.strip())[0]
                                hearing[0].parties.add(party)
                                sur_check({'check_psj': True}, qts, task.court,
                                          senate, register, number, year,
                                          HEARING_URL.format(
                                              task.court.id, senate, quote(register),
                                              number, year, tdate, tdate))
                except:
                    # Non-data rows (headers, separators) fail parsing; skip them.
                    pass
        task.delete()
    except:
        LOGGER.warning(
            'Failed to download hearings for {0}, {1.year:d}-{1.month:02d}-{1.day:02d}'
            .format(task.court_id, task.date))
        return
    LOGGER.debug(
        'Downloaded hearings for {0}, {1.year:d}-{1.month:02d}-{1.day:02d}'.
        format(task.court_id, task.date))
def cron_update2():
    """Download Supreme Administrative Court hearings.

    The SAC publishes no courtroom feed, so all of its hearings are filed
    under a single placeholder courtroom "(neuvedeno)".
    """
    nss = Court.objects.get(pk=SUPREME_ADMINISTRATIVE_COURT)
    croom = Courtroom.objects.get_or_create(court=nss, desc='(neuvedeno)')[0]
    form = Form.objects.get_or_create(name='Veřejné jednání')[0]
    try:
        res = get(LIST_COURTROOMS2)
        soup = BeautifulSoup(res.text, 'html.parser')
        for item in soup.select('table.item'):
            # Backfill auxiliary ids for hearings that still lack one.
            for hearing in Hearing.objects.filter(courtroom__court=nss, auxid=0):
                hearing.auxid = getauxid(hearing.senate, hearing.register, hearing.number, hearing.year)
                hearing.save()
            try:
                senate = register = number = year = judge = ttm = None
                parties = []
                for trow in item.select('tr'):
                    ths = trow.th.text.strip()
                    tds = trow.td.text.strip()
                    if ths.startswith('Spisová značka:'):
                        senate, register, number, year = decomposeref(tds)
                    elif ths.startswith('Účastníci řízení:'):
                        for query in trow.td:
                            # Plain text nodes (those with .strip) are party names.
                            if 'strip' in dir(query):
                                party = Party.objects.get_or_create(
                                    name=query.strip())[0]
                                parties.append(party)
                    elif ths.startswith('Předseda senátu:'):
                        judge = Judge.objects.get_or_create(name=tds)[0]
                    elif ths.startswith('Datum jednání:'):
                        # "d.m.yyyy <weekday> h:m" — date and time tokens.
                        dtm = tds.split()
                        dat = list(map(int, dtm[0].split('.')))
                        tim = list(map(int, dtm[2].split(':')))
                        ttm = datetime(dat[2], dat[1], dat[0], tim[0], tim[1])
                auxid = getauxid(senate, register, number, year)
                hearing = Hearing.objects.update_or_create(courtroom=croom,
                                                           time=ttm,
                                                           senate=senate,
                                                           register=register,
                                                           number=number,
                                                           year=year,
                                                           form=form,
                                                           judge=judge,
                                                           closed=False,
                                                           cancelled=False,
                                                           auxid=auxid)
                if hearing[1]:
                    # Newly created: attach parties and run watch checks.
                    for party in parties:
                        hearing[0].parties.add(party)
                        sur_check({'check_psj': True}, party.name, nss, senate,
                                  register, number, year,
                                  HEARING_URL.format(nss.id, senate, quote(register),
                                                     number, year, ttm.date(), ttm.date()))
            except:  # pragma: no cover
                # Best-effort per item; malformed entries are skipped.
                pass
    except:  # pragma: no cover
        LOGGER.warning('Supreme Administrative Court update failed')
    LOGGER.debug('Downloaded Supreme Administrative Court hearings')
def get_fx_rate(curr, dat, log=None, use_fixed=False, log_fixed=None):
    """Return the CNB FX rate for currency ``curr`` on date ``dat``.

    Returns ``(rate, quantity, table_date, error)`` where on failure the
    first three members are (mostly) None and ``error`` is a Czech message.
    With ``use_fixed`` set, obsolete currencies are converted through their
    successor using a fixed conversion rate.  ``log`` / ``log_fixed`` are
    optional lists that receive audit records of the rates actually used.
    """
    LOGGER.debug(
        'FX rate requested, currency "{0}" for {1.year:d}-{1.month:02d}-{1.day:02d}, fixed "{2}"'
        .format(curr, dat, use_fixed))
    # Obsolete currencies with a fixed conversion to their successor,
    # applicable from 'date_from' on.
    fixed_list = {
        'XEU': {'currency_to': 'EUR', 'fixed_rate': 1, 'date_from': date(1999, 1, 1)},
        'ATS': {'currency_to': 'EUR', 'fixed_rate': 13.7603, 'date_from': date(1998, 12, 31)},
        'BEF': {'currency_to': 'EUR', 'fixed_rate': 40.3399, 'date_from': date(1998, 12, 31)},
        'NLG': {'currency_to': 'EUR', 'fixed_rate': 2.20371, 'date_from': date(1998, 12, 31)},
        'FIM': {'currency_to': 'EUR', 'fixed_rate': 5.94573, 'date_from': date(1998, 12, 31)},
        'FRF': {'currency_to': 'EUR', 'fixed_rate': 6.55957, 'date_from': date(1998, 12, 31)},
        'DEM': {'currency_to': 'EUR', 'fixed_rate': 1.95583, 'date_from': date(1998, 12, 31)},
        'IEP': {'currency_to': 'EUR', 'fixed_rate': .787564, 'date_from': date(1998, 12, 31)},
        'ITL': {'currency_to': 'EUR', 'fixed_rate': 1936.27, 'date_from': date(1998, 12, 31)},
        'LUF': {'currency_to': 'EUR', 'fixed_rate': 40.3399, 'date_from': date(1998, 12, 31)},
        'MCF': {'currency_to': 'EUR', 'fixed_rate': 6.55957, 'date_from': date(1998, 12, 31)},
        'PTE': {'currency_to': 'EUR', 'fixed_rate': 200.482, 'date_from': date(1998, 12, 31)},
        'SML': {'currency_to': 'EUR', 'fixed_rate': 1936.27, 'date_from': date(1998, 12, 31)},
        'ESP': {'currency_to': 'EUR', 'fixed_rate': 166.386, 'date_from': date(1998, 12, 31)},
        'VAL': {'currency_to': 'EUR', 'fixed_rate': 1936.27, 'date_from': date(1998, 12, 31)},
        'GRD': {'currency_to': 'EUR', 'fixed_rate': 340.75, 'date_from': date(2000, 6, 19)},
        'SIT': {'currency_to': 'EUR', 'fixed_rate': 239.64, 'date_from': date(2006, 7, 11)},
        'CYP': {'currency_to': 'EUR', 'fixed_rate': .585274, 'date_from': date(2007, 7, 10)},
        'MTL': {'currency_to': 'EUR', 'fixed_rate': .4293, 'date_from': date(2007, 7, 10)},
        'SKK': {'currency_to': 'EUR', 'fixed_rate': 30.126, 'date_from': date(2008, 7, 8)},
        'EEK': {'currency_to': 'EUR', 'fixed_rate': 15.6466, 'date_from': date(2010, 7, 13)},
        'ROL': {'currency_to': 'RON', 'fixed_rate': 10000, 'date_from': date(2005, 7, 1)},
        'RUR': {'currency_to': 'RUB', 'fixed_rate': 1000, 'date_from': date(1998, 1, 1)},
        'MXP': {'currency_to': 'MXN', 'fixed_rate': 1000, 'date_from': date(1993, 1, 1)},
        'UAK': {'currency_to': 'UAH', 'fixed_rate': 100000, 'date_from': date(1996, 9, 2)},
        'TRL': {'currency_to': 'TRY', 'fixed_rate': 1000000, 'date_from': date(2005, 1, 1)},
        'BGL': {'currency_to': 'BGN', 'fixed_rate': 1000, 'date_from': date(1999, 7, 5)},
        'PLZ': {'currency_to': 'PLN', 'fixed_rate': 10000, 'date_from': date(1995, 1, 1)},
        'CSD': {'currency_to': 'RSD', 'fixed_rate': 1, 'date_from': date(2003, 1, 1)},
    }
    today = date.today()
    if dat.year < 1991 or dat > today:
        return None, None, None, 'Chybné datum, data nejsou k disposici'
    rat = FXrate.objects.filter(date=dat)
    if rat:
        # Table already cached in the database.
        txt = rat[0].text
    else:
        surl = (
            'https://www.cnb.cz/cs/financni_trhy/devizovy_trh/kurzy_devizoveho_trhu/denni_kurz.xml?'
            'date={0.day:d}.{0.month:d}.{0.year:d}'.format(dat))
        txt = getcache(surl, DOWNLOAD_REPEAT)[0]
        if not txt:
            LOGGER.warning('No connection to CNB server')
            return None, None, None, 'Chyba spojení se serverem ČNB'
    try:
        soup = new_xml(txt)
        assert soup
        assert soup.find('tabulka', {'typ': 'XML_TYP_CNB_KURZY_DEVIZOVEHO_TRHU'})
        dreq = soup.find('kurzy', {'banka': 'CNB'})['datum']
        dreq = date(int(dreq[6:]), int(dreq[3:5]), int(dreq[:2]))
    except Exception:  # narrowed from a bare "except:"
        LOGGER.error('Invalid FX table structure for {0.year:d}-{0.month:02d}-{0.day:02d}'.format(dat))
        return None, None, None, 'Chyba struktury kursové tabulky'
    # Cache the table once it is final (exact-date table, or old enough
    # that it will not change any more).
    if not rat and (dreq == dat or (today - dat) > DOWNLOAD_WAIT):
        FXrate(date=dat, text=txt).save()
    lin = soup.find('radek', {'kod': curr})
    frat = 1
    curr_rq = curr
    if not lin:
        if use_fixed and curr in fixed_list and fixed_list[curr]['date_from'] <= dat:
            # Fall back to the successor currency, converting by the fixed rate.
            curr = fixed_list[curr]['currency_to']
            lin = soup.find('radek', {'kod': curr})
            if not lin:
                return None, None, dreq, 'Kurs není v kursové tabulce'
            frat = fixed_list[curr_rq]['fixed_rate']
            if log_fixed is not None:
                log_fixed.append({
                    'currency_from': curr_rq,
                    'currency_to': fixed_list[curr_rq]['currency_to'],
                    'rate': fixed_list[curr_rq]['fixed_rate'],
                    'date_from': fixed_list[curr_rq]['date_from']})
        else:
            return None, None, dreq, 'Kurs není v kursové tabulce'
    try:
        qty = int(lin['mnozstvi'])
        if lin.has_attr('kurz'):
            rate = lin['kurz']
        elif lin.has_attr('pomer'):
            rate = lin['pomer']
        rate = float(rate.replace(',', '.'))
    except Exception:  # narrowed from a bare "except:"; also catches missing rate attr
        LOGGER.error('Invalid FX table line for {0.year:d}-{0.month:02d}-{0.day:02d}'.format(dat))
        return None, None, dreq, 'Chyba řádku kursové tabulky'
    if log is not None:
        log.append({
            'currency': curr,
            'quantity': qty,
            'rate': rate,
            'date_required': dat,
            'date': dreq})
    return rate / frat, qty, dreq, None
def get_mpi_rate(typ, dat, log=None):
    """Return the CNB monetary-policy rate of kind ``typ`` valid on ``dat``.

    ``typ`` is one of 'DISC', 'LOMB', 'REPO'.  Returns ``(rate, error)``
    where exactly one member is None; ``error`` is a Czech message.
    ``log`` is an optional list receiving an audit record of the result.
    """
    LOGGER.debug('MPI rate of type "{0}" requested for {1.year:d}-{1.month:02d}-{1.day:02d}'.format(typ, dat))
    now = datetime.now()
    prefix = 'https://www.cnb.cz/cs/faq/vyvoj_'
    suffix = '_historie.txt'
    # type -> (URL infix, expected header line of the history file)
    types = {
        'DISC': ('diskontni', 'PLATNA_OD|CNB_DISKONTNI_SAZBA_V_%'),
        'LOMB': ('lombard', 'PLATNA_OD|CNB_LOMBARDNI_SAZBA_V_%'),
        'REPO': ('repo', 'PLATNA_OD|CNB_REPO_SAZBA_V_%'),
    }
    if typ not in types:  # idiomatic form of "not in types.keys()"
        return None, 'Chybný druh sazby'
    if dat.year < 1990 or dat > now.date():
        return None, 'Chybné datum, data nejsou k disposici'
    stat = MPIstat.objects.get_or_create(type=typ)
    updated = stat[0].timestamp_update.date()
    # Refresh when the stat record is brand new, or when no rate covering
    # the date exists yet and the local copy may still be stale.
    if stat[1] or (not MPIrate.objects.filter(type=typ, valid__gte=dat).exists()
                   and (updated - dat) < DOWNLOAD_WAIT):
        surl = prefix + types[typ][0] + suffix
        txt = getcache(surl, DOWNLOAD_REPEAT)[0]
        if not txt:
            LOGGER.warning('No connection to CNB server')
            return None, 'Chyba spojení se serverem ČNB'
        txt = txt.replace('\r', '').split('\n')
        if txt[0] != types[typ][1]:
            LOGGER.error('Error in rate table for {}'.format(types[typ][0]))
            return None, 'Chyba tabulky sazeb (1)'
        rates = []
        try:
            # Data lines are "YYYYMMDD|rate" with a comma decimal separator.
            for lin in txt[1:]:
                assert lin[8] == '|'
                rates.append((
                    float(lin[9:].replace(',', '.')),
                    date(int(lin[:4]), int(lin[4:6]), int(lin[6:8]))))
        except Exception:  # narrowed from a bare "except:"
            LOGGER.error('Error in rate table for {}'.format(types[typ][0]))
            return None, 'Chyba tabulky sazeb (2)'
        try:
            for rat in rates:
                if stat[1] or (updated - rat[1]) < DOWNLOAD_WAIT:
                    MPIrate.objects.get_or_create(type=typ, rate=rat[0], valid=rat[1])
        except Exception:  # pragma: no cover
            LOGGER.error('Error writing in database')
            return None, 'Chyba zápisu do database (1)'
        try:
            # Re-save to bump the stat timestamp — presumably auto-updated;
            # TODO confirm against the MPIstat model.
            MPIstat.objects.get_or_create(type=typ)[0].save()
        except Exception:  # pragma: no cover
            LOGGER.error('Error writing in database')
            return None, 'Chyba zápisu do database (2)'
    # Most recent rate valid on or before the requested date.
    res = MPIrate.objects.filter(type=typ, valid__lte=dat).order_by('-valid')
    if not res.exists():
        return None, 'Sazba není k disposici'
    if log is not None:
        log.append({'type': typ, 'rate': res[0].rate, 'date': dat})
    return res[0].rate, None
def get_fx_rate(curr, dat, log=None, use_fixed=False, log_fixed=None):
    """Return the CNB FX rate for currency ``curr`` on date ``dat``.

    Returns ``(rate, quantity, table_date, error)`` where on failure the
    first three members are (mostly) None and ``error`` is a Czech message.
    With ``use_fixed`` set, obsolete currencies are converted through their
    successor using a fixed conversion rate.  ``log`` / ``log_fixed`` are
    optional lists that receive audit records of the rates actually used.
    """
    LOGGER.debug(
        'FX rate requested, currency "{0}" for {1.year:d}-{1.month:02d}-{1.day:02d}, fixed "{2}"'
        .format(curr, dat, use_fixed))
    # Obsolete currencies with a fixed conversion to their successor,
    # applicable from 'date_from' on.
    fixed_list = {
        'XEU': {
            'currency_to': 'EUR',
            'fixed_rate': 1,
            'date_from': date(1999, 1, 1)
        },
        'ATS': {
            'currency_to': 'EUR',
            'fixed_rate': 13.7603,
            'date_from': date(1998, 12, 31)
        },
        'BEF': {
            'currency_to': 'EUR',
            'fixed_rate': 40.3399,
            'date_from': date(1998, 12, 31)
        },
        'NLG': {
            'currency_to': 'EUR',
            'fixed_rate': 2.20371,
            'date_from': date(1998, 12, 31)
        },
        'FIM': {
            'currency_to': 'EUR',
            'fixed_rate': 5.94573,
            'date_from': date(1998, 12, 31)
        },
        'FRF': {
            'currency_to': 'EUR',
            'fixed_rate': 6.55957,
            'date_from': date(1998, 12, 31)
        },
        'DEM': {
            'currency_to': 'EUR',
            'fixed_rate': 1.95583,
            'date_from': date(1998, 12, 31)
        },
        'IEP': {
            'currency_to': 'EUR',
            'fixed_rate': .787564,
            'date_from': date(1998, 12, 31)
        },
        'ITL': {
            'currency_to': 'EUR',
            'fixed_rate': 1936.27,
            'date_from': date(1998, 12, 31)
        },
        'LUF': {
            'currency_to': 'EUR',
            'fixed_rate': 40.3399,
            'date_from': date(1998, 12, 31)
        },
        'MCF': {
            'currency_to': 'EUR',
            'fixed_rate': 6.55957,
            'date_from': date(1998, 12, 31)
        },
        'PTE': {
            'currency_to': 'EUR',
            'fixed_rate': 200.482,
            'date_from': date(1998, 12, 31)
        },
        'SML': {
            'currency_to': 'EUR',
            'fixed_rate': 1936.27,
            'date_from': date(1998, 12, 31)
        },
        'ESP': {
            'currency_to': 'EUR',
            'fixed_rate': 166.386,
            'date_from': date(1998, 12, 31)
        },
        'VAL': {
            'currency_to': 'EUR',
            'fixed_rate': 1936.27,
            'date_from': date(1998, 12, 31)
        },
        'GRD': {
            'currency_to': 'EUR',
            'fixed_rate': 340.75,
            'date_from': date(2000, 6, 19)
        },
        'SIT': {
            'currency_to': 'EUR',
            'fixed_rate': 239.64,
            'date_from': date(2006, 7, 11)
        },
        'CYP': {
            'currency_to': 'EUR',
            'fixed_rate': .585274,
            'date_from': date(2007, 7, 10)
        },
        'MTL': {
            'currency_to': 'EUR',
            'fixed_rate': .4293,
            'date_from': date(2007, 7, 10)
        },
        'SKK': {
            'currency_to': 'EUR',
            'fixed_rate': 30.126,
            'date_from': date(2008, 7, 8)
        },
        'EEK': {
            'currency_to': 'EUR',
            'fixed_rate': 15.6466,
            'date_from': date(2010, 7, 13)
        },
        'ROL': {
            'currency_to': 'RON',
            'fixed_rate': 10000,
            'date_from': date(2005, 7, 1)
        },
        'RUR': {
            'currency_to': 'RUB',
            'fixed_rate': 1000,
            'date_from': date(1998, 1, 1)
        },
        'MXP': {
            'currency_to': 'MXN',
            'fixed_rate': 1000,
            'date_from': date(1993, 1, 1)
        },
        'UAK': {
            'currency_to': 'UAH',
            'fixed_rate': 100000,
            'date_from': date(1996, 9, 2)
        },
        'TRL': {
            'currency_to': 'TRY',
            'fixed_rate': 1000000,
            'date_from': date(2005, 1, 1)
        },
        'BGL': {
            'currency_to': 'BGN',
            'fixed_rate': 1000,
            'date_from': date(1999, 7, 5)
        },
        'PLZ': {
            'currency_to': 'PLN',
            'fixed_rate': 10000,
            'date_from': date(1995, 1, 1)
        },
        'CSD': {
            'currency_to': 'RSD',
            'fixed_rate': 1,
            'date_from': date(2003, 1, 1)
        },
    }
    today = date.today()
    if dat.year < 1991 or dat > today:
        return None, None, None, 'Chybné datum, data nejsou k disposici'
    rat = FXrate.objects.filter(date=dat)
    if rat:
        # Table already cached in the database.
        txt = rat[0].text
    else:
        surl = (
            'https://www.cnb.cz/cs/financni_trhy/devizovy_trh/kurzy_devizoveho_trhu/denni_kurz.xml?'
            'date={0.day:d}.{0.month:d}.{0.year:d}'.format(dat))
        txt = getcache(surl, DOWNLOAD_REPEAT)[0]
        if not txt:
            LOGGER.warning('No connection to CNB server')
            return None, None, None, 'Chyba spojení se serverem ČNB'
    try:
        soup = new_xml(txt)
        assert soup
        assert soup.find('tabulka', {'typ': 'XML_TYP_CNB_KURZY_DEVIZOVEHO_TRHU'})
        dreq = soup.find('kurzy', {'banka': 'CNB'})['datum']
        # Table date is "dd.mm.yyyy".
        dreq = date(int(dreq[6:]), int(dreq[3:5]), int(dreq[:2]))
    except:
        LOGGER.error(
            'Invalid FX table structure for {0.year:d}-{0.month:02d}-{0.day:02d}'
            .format(dat))
        return None, None, None, 'Chyba struktury kursové tabulky'
    # Cache the table once it is final (exact-date table, or old enough
    # that it will not change any more).
    if not rat and (dreq == dat or (today - dat) > DOWNLOAD_WAIT):
        FXrate(date=dat, text=txt).save()
    lin = soup.find('radek', {'kod': curr})
    frat = 1
    curr_rq = curr
    if not lin:
        if use_fixed and curr in fixed_list and fixed_list[curr][
                'date_from'] <= dat:
            # Fall back to the successor currency, converting by the fixed rate.
            curr = fixed_list[curr]['currency_to']
            lin = soup.find('radek', {'kod': curr})
            if not lin:
                return None, None, dreq, 'Kurs není v kursové tabulce'
            frat = fixed_list[curr_rq]['fixed_rate']
            if log_fixed != None:
                log_fixed.append({
                    'currency_from': curr_rq,
                    'currency_to': fixed_list[curr_rq]['currency_to'],
                    'rate': fixed_list[curr_rq]['fixed_rate'],
                    'date_from': fixed_list[curr_rq]['date_from']
                })
        else:
            return None, None, dreq, 'Kurs není v kursové tabulce'
    try:
        qty = int(lin['mnozstvi'])
        if lin.has_attr('kurz'):
            rate = lin['kurz']
        elif lin.has_attr('pomer'):
            rate = lin['pomer']
        rate = float(rate.replace(',', '.'))
    except:
        LOGGER.error(
            'Invalid FX table line for {0.year:d}-{0.month:02d}-{0.day:02d}'.
            format(dat))
        return None, None, dreq, 'Chyba řádku kursové tabulky'
    if log != None:
        log.append({
            'currency': curr,
            'quantity': qty,
            'rate': rate,
            'date_required': dat,
            'date': dreq
        })
    return rate / frat, qty, dreq, None
def get_mpi_rate(typ, dat, log=None):
    """Return the CNB monetary-policy rate of kind ``typ`` valid on ``dat``.

    ``typ`` is one of 'DISC', 'LOMB', 'REPO'.  Returns ``(rate, error)``
    where exactly one member is None; ``error`` is a Czech message.
    ``log`` is an optional list receiving an audit record of the result.
    """
    LOGGER.debug(
        'MPI rate of type "{0}" requested for {1.year:d}-{1.month:02d}-{1.day:02d}'
        .format(typ, dat))
    now = datetime.now()
    prefix = 'https://www.cnb.cz/cs/faq/vyvoj_'
    suffix = '_historie.txt'
    # type -> (URL infix, expected header line of the history file)
    types = {
        'DISC': ('diskontni', 'PLATNA_OD|CNB_DISKONTNI_SAZBA_V_%'),
        'LOMB': ('lombard', 'PLATNA_OD|CNB_LOMBARDNI_SAZBA_V_%'),
        'REPO': ('repo', 'PLATNA_OD|CNB_REPO_SAZBA_V_%'),
    }
    if typ not in types.keys():
        return None, 'Chybný druh sazby'
    if dat.year < 1990 or dat > now.date():
        return None, 'Chybné datum, data nejsou k disposici'
    stat = MPIstat.objects.get_or_create(type=typ)
    updated = stat[0].timestamp_update.date()
    # Refresh when the stat record is brand new, or when no rate covering
    # the date exists yet and the local copy may still be stale.
    if stat[1] or (
            not MPIrate.objects.filter(type=typ, valid__gte=dat).exists()
            and (updated - dat) < DOWNLOAD_WAIT):
        surl = prefix + types[typ][0] + suffix
        txt = getcache(surl, DOWNLOAD_REPEAT)[0]
        if not txt:
            LOGGER.warning('No connection to CNB server')
            return None, 'Chyba spojení se serverem ČNB'
        txt = txt.replace('\r', '').split('\n')
        if txt[0] != types[typ][1]:
            LOGGER.error('Error in rate table for {}'.format(types[typ][0]))
            return None, 'Chyba tabulky sazeb (1)'
        rates = []
        try:
            # Data lines are "YYYYMMDD|rate" with a comma decimal separator.
            for lin in txt[1:]:
                assert lin[8] == '|'
                rates.append((float(lin[9:].replace(',', '.')),
                              date(int(lin[:4]), int(lin[4:6]), int(lin[6:8]))))
        except:
            LOGGER.error('Error in rate table for {}'.format(types[typ][0]))
            return None, 'Chyba tabulky sazeb (2)'
        try:
            for rat in rates:
                if stat[1] or (updated - rat[1]) < DOWNLOAD_WAIT:
                    MPIrate.objects.get_or_create(type=typ, rate=rat[0], valid=rat[1])
        except:  # pragma: no cover
            LOGGER.error('Error writing in database')
            return None, 'Chyba zápisu do database (1)'
        try:
            # Re-save to bump the stat timestamp — presumably auto-updated;
            # TODO confirm against the MPIstat model.
            MPIstat.objects.get_or_create(type=typ)[0].save()
        except:  # pragma: no cover
            LOGGER.error('Error writing in database')
            return None, 'Chyba zápisu do database (2)'
    # Most recent rate valid on or before the requested date.
    res = MPIrate.objects.filter(type=typ, valid__lte=dat).order_by('-valid')
    if not res.exists():
        return None, 'Sazba není k disposici'
    if log != None:
        log.append({'type': typ,
                    'rate': res[0].rate,
                    'date': dat})
    return res[0].rate, None