def test_example(self, engine):
    """Exercise insert/select round-trips on a fresh `users` table.

    Covers single-row and multi-row inserts, the three row-access styles
    (attribute, string key, column object), a raw SQL call through the
    engine, and a crude performance budget for the whole test.
    """
    users = get_table('users', columns=['id', 'name', 'age', 'password'])
    start = time.time()

    inserter = users.insert()
    # Single-row insert via keyword arguments.
    inserter.execute(name='Mary', age=30, password='******')
    # Multi-row insert via a sequence of dicts.
    inserter.execute(
        {'name': 'John', 'age': 42},
        {'name': 'Susan', 'age': 57},
        {'name': 'Carl', 'age': 33},
    )

    query = users.select()  # SELECT * FROM users
    result_set = query.execute()

    first = result_set.fetchone()
    assert first.id == 1
    assert first['name'] == 'Mary'
    assert first.age == 30
    # NOTE(review): '******' was inserted but 'secret' is expected back —
    # presumably the password column type transforms values; confirm.
    assert first[users.c.password] == 'secret'

    remaining = [(row.name, row.age) for row in result_set]
    assert remaining == [('John', 42), ('Susan', 57), ('Carl', 33)]

    # row = db.engine.execute('select sum_of_ages()').fetchone()
    total_row = engine.execute('select sum_of_ages()').fetchone()
    assert total_row[0] == 162

    # make sure this stuff is performant
    elapsed = time.time() - start
    assert elapsed < 0.1, elapsed
def main(args):
    """CLI entry point: list genres/books, launch the GUI, or download books.

    Returns a process exit code: 0 on success, 1 when no books were selected.
    """
    output_folder = os.path.expanduser(args.output_folder)
    books = helper.get_table(output_folder, force=args.force)

    if args.gui:
        app = gui.create()
        app.set_output_folder(output_folder)
        app.populate_genres(helper.get_genres(books))
        app._helper = helper
        app._books = books
        # Bug fix: the original did `threading.Thread(app.mainloop()).start()`,
        # which invoked mainloop() eagerly (blocking here) and then started an
        # empty Thread with its None return value. Tk's event loop belongs on
        # the main thread anyway, so call it directly.
        app.mainloop()
        return 0

    if args.list_genres:
        print("\nAvailable genre options:")
        genres = helper.get_genres(books)
        for key in sorted(genres):
            print("    '{}': {} books".format(key, genres[key]))
        print()
        return 0

    if args.list_books is not None:
        print("\nList of available books:")
        books = helper.get_books_in_genres(books, args.list_books)
        count = 0
        for count, book in enumerate(sorted(books), start=1):
            print("  {}. {}".format(count, book))
        print()
        return 0

    if not args.genre and not args.title and not args.all:
        print("\nNo books selected to download!")
        print("  Please select books by title (--title) or genre (--genre)")
        print("  If you want to download everything use --all\n")
        return 1

    helper.download_books(
        books,
        # Bug fix: pass the tilde-expanded path, not the raw argument,
        # so downloads land in the same folder get_table() used above.
        output_folder,
        force=args.force,
        selected_genre=args.genre,
        selected_title=args.title,
        pdf=(not args.only_epub),
        epub=(not args.only_pdf),
        confirm_download=args.confirm_before_download,
        verbose=args.verbose,
    )
    print("\nFinish downloading.")
    return 0
def test_example_2(self, engine):
    """
    Confirm that we get a fresh database each time.
    No rows from the previous test should appear here.
    """
    users = get_table('users')
    users.insert().execute(name='Margo', age=29, password='******')

    cursor = users.select().execute()
    margo = cursor.fetchone()

    # A fresh database means Margo gets id 1, not an id after the
    # rows inserted by the previous test.
    assert margo.id == 1
    assert margo['name'] == 'Margo'
    assert margo.age == 29
    # NOTE(review): '******' in, 'broccoli' out — presumably the password
    # column type transforms values; confirm against the table definition.
    assert margo[users.c.password] == 'broccoli'

    # no more after the first row
    assert cursor.fetchall() == []
def get_table():
    """Flask endpoint: return the table's columns as a JSON payload.

    Validates the incoming GET request first; on validation failure the
    validator's response is returned as-is. Helper errors are propagated
    with HTTP 400, success returns HTTP 200 with {'table': columns}.
    All JSON responses carry a permissive CORS header.
    """
    validated = validateGETRequest(request)
    if validated != "validated":
        return validated

    def _json_response(payload, status):
        # Every branch below builds the same kind of response; centralize
        # the JSON mimetype and CORS header here.
        response = Response(
            json.dumps(payload), status=status, mimetype='application/json'
        )
        response.headers["Access-Control-Allow-Origin"] = "*"
        return response

    # Get items from the helper
    columns = helper.get_table()

    # Propagate helper errors (note: the original comment said 404, but the
    # code has always returned 400 — the comment was wrong, not the code).
    if 'error' in columns:
        return _json_response(columns, 400)

    return _json_response({'table': columns}, 200)
full_text = helper.clear_text_of_ambigous_chars(bs.getText()) text = bs.findAll(text=re.compile(status_pattern))[0] status_raw = re.findall(status_pattern,text)[0] status = datetime.datetime.strptime(status_raw, '(Stand: %d.%m.%Y, %H:%M Uhr)').strftime("%Y-%m-%d") cases_raw = re.findall(cases_pattern, full_text)[0] cases = int(re.findall(r'[0-9]+', cases_raw)[0]) add_to_database("09184", status, cases, "Kreis München") table = bs.find('table', {'class':'contenttable'}) data = dict(helper.get_table(table)) community = { 'Aschheim': { 'uid': '09184112', cases: -1 }, 'Aying': { 'uid': '09184137', cases: -1 }, 'Baierbrunn':{ 'uid': '09184113', cases: -1 }, 'Brunnthal': { 'uid': '09184114', cases: -1 }, 'Garching b. München': { 'uid': '09184119', cases: -1 }, 'Gräfelfing': { 'uid': '09184120', cases: -1 }, 'Grasbrunn':{ 'uid': '09184121', cases: -1 }, 'Grünwald':{ 'uid': '09184122', cases: -1 }, 'Haar':{ 'uid': '09184123', cases: -1 }, 'Höhenkirchen-Siegertsbrunn':{ 'uid': '09184127', cases: -1 }, 'Hohenbrunn':{ 'uid': '09184129', cases: -1 }, 'Ismaning':{ 'uid': '09184130', cases: -1 }, 'Kirchheim b. München':{ 'uid': '09184131', cases: -1 },
import datetime
import re
import locale

locale.setlocale(locale.LC_TIME, "de_DE.utf-8")

import helper
import scrape
# NOTE(review): BeautifulSoup is used below but never imported directly —
# presumably it arrives via this star import; confirm, or import bs4 explicitly.
from database_interface import *

main_url = "https://www.landkreis-harburg.de/corona"

# Fetch and parse the Landkreis Harburg corona page.
response = scrape.request_url(main_url)
soup = BeautifulSoup(response.text, "html.parser")

# The case table is the first <table> after the headline text.
headline = soup.find(text=re.compile("Zahl erfasster Coronafälle im Landkreis Harburg"))
case_table = headline.findNext('table')
rows = helper.get_table(case_table)

# remove table head
rows.pop(0)

# One database record per table row: (date, case count).
for entry in rows:
    add_to_database("03353", helper.get_status(entry[0]), int(entry[1]), "Kreis Harburg")
status_pattern = re.compile(r'.*Stand: .*') cases_pattern = re.compile(r'.*Es gibt aktuell.*') DISTRICT_UID = "08135" # Aktuelle Informationen zum Coronavirus (Stand: 18.03.2020, 12:00 Uhr) status_raw = bs.findAll(text=re.compile("Stand"))[0] status= datetime.datetime.strptime(status_raw, 'Aktuelle Informationen zum Coronavirus (Stand: %d.%m.%Y, %H:%M Uhr)').strftime("%Y-%m-%d %H:%M:%S") text_match = re.compile("Was gibt’s neues?") text_position = bs.find(text=text_match) table = text_position.findNext('table') data_total=helper.get_table(table) cases = helper.get_number_only(data_total[1][0]) community = { # heidenheim an der brenz 'Stadt Heidenheim': { 'uid': '08135019', cases: -1 }, 'Giengen': { 'uid': '08135016', cases: -1 }, 'Herbrechtingen': { 'uid': '08135020', cases: -1 }, 'Niederstotzingen': { 'uid': '08135027', cases: -1 }, 'Dischingen': { 'uid': '08135010', cases: -1 }, 'Gerstetten': { 'uid': '08135015', cases: -1 }, 'Hermaringen': { 'uid': '08135021', cases: -1 }, 'Königsbronn': { 'uid': '08135025', cases: -1 }, 'Nattheim': { 'uid': '08135026', cases: -1 }, 'Sontheim': { 'uid': '08135031', cases: -1 },
import requests
import datetime
import re
import locale
import dateparser

locale.setlocale(locale.LC_TIME, "de_DE.utf-8")

import helper
import scrape
# NOTE(review): BeautifulSoup is used below but never imported directly —
# presumably it arrives via this star import; confirm.
from database_interface import *

main_url = "https://www.landkreis-harburg.de/corona"

req = scrape.request_url(main_url)
bs = BeautifulSoup(req.text, "html.parser")

table = bs.find('table')
# Fix: reuse the table found above instead of running bs.find('table')
# a second time and discarding the first result.
data = helper.get_table(table)

# Unlike the sibling scraper, the head row is intentionally kept here
# (the original had the pop commented out):
# data.pop(0)

for row in data:
    # dateparser handles the German date strings regardless of exact format.
    status = dateparser.parse(row[0]).strftime("%Y-%m-%d")
    cases = int(row[1])
    add_to_database("03353", status, cases, "Kreis Harburg")