def save_opportunities_to_db(self):
    """Pull every opportunity from Zendesk and store each record's data."""
    records = ZendeskAPIHandler().get_opportunities()
    db = DatabaseHandler()
    for record in records:
        db.save_opportunity_to_db(record['data'])
def save_contacts_to_db(self):
    """Pull every contact from Zendesk and store each record's data."""
    records = ZendeskAPIHandler().get_contacts()
    db = DatabaseHandler()
    for record in records:
        db.save_contact_to_db(record['data'])
def fetch():
    """Store articles for `site_name` published on/after `last_date`.

    NOTE(review): relies on names defined elsewhere in the file:
    `article_downloaders`, `site_name`, `last_date`, `fetch_progress`.
    """
    db = DatabaseHandler()
    for downloader in article_downloaders:
        if site_name in downloader.site_names:
            for article in downloader.get_articles_by_site_name(site_name):
                if article.publish_date >= last_date:
                    db.add_article_or_get_id(article)
                    # Record the publish date of the last stored article as
                    # this site's fetch progress.
                    fetch_progress[site_name] = article.publish_date
                else:
                    # Presumably articles arrive newest-first, so the first
                    # one older than last_date ends the site's batch —
                    # TODO confirm the ordering guarantee.
                    break
    # Fetch finished: remove this site's progress marker.
    # NOTE(review): pop() without a default raises KeyError if the key was
    # never set — presumably it is seeded by the caller; verify.
    fetch_progress.pop(site_name)
def test_sensors() -> None:
    """Exercise every sensor: write one batch of readings every 5 s for 30 s."""
    cpu_sensor = CPUTemp("/sys/class/thermal/thermal_zone0/temp")
    air_sensor = AirQ(1)
    climate_sensor = TempHum(2)
    db = DatabaseHandler("test01")

    def log_all_readings() -> None:
        # CPU temperature: (timestamp, value) tuple.
        cpu = cpu_sensor.get_reading()
        db.write_to_db("CPU-Temp", cpu)
        print(f"Wrote CPU Temperature: {cpu[1]} at {cpu[0]}")
        # Air quality.
        air = air_sensor.get_reading()
        db.write_to_db("AirQ", air)
        print(f"Wrote Air Quality: {air[1]} at {air[0]}")
        # Temperature and humidity arrive together as (timestamp, (temp, hum)).
        combined = climate_sensor.get_reading("temperature")
        room_temp = (combined[0], combined[1][0])
        db.write_to_db("Room_Temp", room_temp)
        print(f"Wrote Room Temperature: {room_temp[1]} at {room_temp[0]}")
        room_hum = (combined[0], combined[1][1])
        db.write_to_db("Room_Hum", room_hum)
        print(f"Wrote Room Humidity: {room_hum[1]} at {room_hum[0]}")

    runner = Scheduler(5, log_all_readings)
    runner.start()
    time.sleep(30)
    runner.stop()
def auto_merge():
    """Compare new events against old ones (and each other) and record merges.

    Updates the module-level MERGE_PROGRESS (0-100) as comparisons complete
    and advances the persisted last-processed-event id when finished.
    """
    global MERGE_PROGRESS
    global settings
    db_new_handler = DatabaseHandler()
    last_id = settings.get_last_processed_event_id()
    # Events up to last_id were already processed; later ids are new.
    old_ids = db_new_handler.get_event_ids_to(last_id)
    new_ids = db_new_handler.get_event_ids_from(last_id + 1)
    old_count = len(old_ids)
    new_count = len(new_ids)
    MERGE_PROGRESS = 0
    # Total comparisons: each new id vs. every old id, plus every unordered
    # pair of new ids. If new_count is 0 the loops below never run, so the
    # zero total is never used as a divisor.
    total_count = new_count * old_count + (new_count * (new_count - 1)) / 2
    count = 0
    for i1 in range(0, new_count):
        # Compare this new event with every already-processed event.
        for i2 in range(0, old_count):
            if are_same(new_ids[i1], old_ids[i2], db_new_handler):
                db_new_handler.add_events_to_events_merge(new_ids[i1], old_ids[i2])
            # Progress advances per comparison, not per merge.
            count += 1
            MERGE_PROGRESS = 100 * count / total_count
        # Compare with the new events after this one (each pair once).
        for i2 in range(i1 + 1, new_count):
            if are_same(new_ids[i1], new_ids[i2], db_new_handler):
                db_new_handler.add_events_to_events_merge(new_ids[i1], new_ids[i2])
            count += 1
            MERGE_PROGRESS = 100 * count / total_count
    MERGE_PROGRESS = 100
    if new_count > 0:
        settings.put_last_processed_event_id(new_ids[new_count - 1])
def test_db_write() -> None:
    """Write a CPU-temperature reading to the test DB every 5 s for 30 s."""
    sensor = CPUTemp("/sys/class/thermal/thermal_zone0/temp")
    db = DatabaseHandler("test01")

    def log_cpu_temp() -> None:
        # Reading is a (timestamp, value) tuple.
        sample = sensor.get_reading()
        db.write_to_db("CPU-Temp", sample)
        print(f"Wrote CPU Temperature: {sample[1]} at {sample[0]}")

    runner = Scheduler(5, log_cpu_temp)
    runner.start()
    time.sleep(30)
    runner.stop()
def auto_merge():
    """Detect duplicate events and record merges for them.

    Each pending (unprocessed) event is compared against every processed
    event and against the pending events after it, so every unordered pair
    is examined exactly once. The module-level MERGE_PROGRESS global tracks
    completion as a value from 0 to 100, and the persisted
    last-processed-event id is advanced once all comparisons finish.
    """
    global MERGE_PROGRESS
    global settings
    handler = DatabaseHandler()
    last_id = settings.get_last_processed_event_id()
    processed_ids = handler.get_event_ids_to(last_id)
    pending_ids = handler.get_event_ids_from(last_id + 1)
    pending_total = len(pending_ids)
    MERGE_PROGRESS = 0
    # One comparison per (pending, processed) pair plus one per pending pair.
    comparisons = (pending_total * len(processed_ids)
                   + (pending_total * (pending_total - 1)) / 2)
    done = 0
    for index, event_id in enumerate(pending_ids):
        # Candidates: all processed events, then the pending events that
        # come after this one.
        for other_id in processed_ids + pending_ids[index + 1:]:
            if are_same(event_id, other_id, handler):
                handler.add_events_to_events_merge(event_id, other_id)
            # Progress advances per comparison, not per merge.
            done += 1
            MERGE_PROGRESS = 100 * done / comparisons
    MERGE_PROGRESS = 100
    if pending_total > 0:
        settings.put_last_processed_event_id(pending_ids[-1])
from file_handler import FileHandler
from validator import Validator
from view import View
from db import DatabaseHandler
from command import Command  # was missing: Command is used below
import pickle
import sys

# hasitha


def _fresh_database(name):
    """Build a new DatabaseHandler, load it, and best-effort cache it to disk.

    A TypeError from pickle (handler holds something unpicklable) is
    tolerated: the app simply runs without the on-disk cache.
    """
    database = DatabaseHandler(Validator(), name)
    database.load()
    try:
        with open(name + ".p", "wb") as cache_file:
            pickle.dump(database, cache_file)
    except TypeError:
        pass
    return database


# Database name comes from the first CLI argument, defaulting to "db".
try:
    database_name = sys.argv[1]
except IndexError:
    database_name = "db"

# Restore the cached handler if present; rebuild on a missing or truncated
# cache file. NOTE: pickle.load on an attacker-controlled file executes
# arbitrary code — acceptable only because this is a local cache.
try:
    with open(database_name + ".p", "rb") as cache_file:
        database = pickle.load(cache_file)
except (FileNotFoundError, EOFError):
    database = _fresh_database(database_name)

view = View()
cli = Command(FileHandler(Validator()), database, view)
cli.cmdloop()
from command import Command
from file_handler import FileHandler
from validator import Validator
from view import View
from db import DatabaseHandler
from unit_testing import MainTest

# Wire up the CLI: a validated file handler, a validated database handler,
# and a view, then hand control to the interactive command loop.
file_handler = FileHandler(Validator())
database = DatabaseHandler(Validator())
view = View()
cli = Command(file_handler, database, view)
cli.cmdloop()
from flask import Flask, request, jsonify
from db import DatabaseHandler
import string
import random

app = Flask(__name__)
app.config['DB_NAME'] = "url.db"

# Fresh schema on every start-up (drops any previously stored URLs).
db = DatabaseHandler(app.config['DB_NAME'])
db.drop_tables()
db.init()


def generate_key(url: str) -> str:
    """Return a random 10-letter key for *url*.

    NOTE(review): `random` is not cryptographically secure and no collision
    check is done; consider `secrets.choice` plus a uniqueness check.
    """
    letters = string.ascii_letters
    return ''.join(random.choice(letters) for _ in range(10))


# API
@app.route('/api/add', methods=['POST'])
def add_url():
    """Shorten the URL passed in the `url` query parameter.

    Returns the stored url/key pair as JSON, or a 400 error when the
    parameter is missing (previously the function fell through and
    returned None, which Flask turns into a 500).
    """
    if 'url' in request.args:
        url = request.args['url']
        key = generate_key(url)
        db.add(url=url, key=key)
        return jsonify(dict(url=url, key=key))
    return jsonify(dict(error="missing 'url' parameter")), 400
import asyncio
from pydantic import AnyUrl
from typing import Any

from schemas import TransactionBase
from db import DatabaseHandler

# Shared database handler for everything persisted by this module.
db = DatabaseHandler()


class StockCollectorHandler:
    # Streams messages from a websocket endpoint, parses them into models,
    # and saves them to the database.
    # NOTE(review): the helper methods used below (_establish_connection,
    # _receive_response, _serialize, _save_to_db) are not visible in this
    # chunk — presumably defined further down the class; verify.

    def __init__(self, url: AnyUrl):
        # url: the websocket endpoint to collect from.
        self.url = url

    async def handle(self) -> None:
        # Connect once, then loop forever: receive, parse, persist.
        websocket = await self._establish_connection(self.url)
        while True:
            response = await self._receive_response(websocket)
            try:
                model = await self._serialize(response)
            except (KeyError, IndexError):
                # Payload missing expected fields — skip this message.
                continue
            self._save_to_db(model)
# Warn anyone who launches this module directly; the platform-specific
# scripts are expected to set things up first.
print(
    "Make sure to run this using the platform-specifc scripts, not directly with python!"
)


def db_safe_current_time() -> int:
    """Current wall-clock time in whole milliseconds, as an int."""
    return math.floor(time.time() * 1000)


# NOTE(review): purpose unclear from this chunk — presumably a harmless
# default/redirect URL used further down; verify.
not_sus_website = "https://wikipedia.org/"
# Auth tokens issued at runtime.
tokens = []

# Single shared SQLite connection, raw cursor, and wrapped handler.
db_connection = sqlite3.connect("db/datameridian.db")
cursor = db_connection.cursor()
db = DatabaseHandler(db_connection)

templates = Jinja2Templates(directory="templates")

app = FastAPI()
api = APIRouter()


# Important stuff for CORP so that the frontend can use SharedArrayBuffer
@app.middleware("http")
async def add_corp_headers(request: Request, call_next):
    # Tag every response with COOP/COEP so the browser grants the page
    # cross-origin isolation (a requirement for SharedArrayBuffer).
    response = await call_next(request)
    response.headers["Cross-Origin-Opener-Policy"] = "same-origin"
    response.headers["Cross-Origin-Embedder-Policy"] = "require-corp"
    return response
from flask import (
    Flask,
    render_template,
    redirect,
    request,
    url_for
)

from db import DatabaseHandler

app = Flask("appname")
app.config["DB_NAME"] = "blog.db"
db = DatabaseHandler(app.config["DB_NAME"])


@app.route("/login", methods=["GET", "POST"])
def login():
    # Check the submitted credentials against the database.
    # NOTE(review): GET requests and failed logins return None (Flask will
    # raise a 500), and the plaintext password is passed into the template
    # context — both look like defects to confirm with the author.
    if request.method == "POST":
        username = request.form['username']
        password = request.form['password']
        if db.do_login(username=username, password=password):
            return render_template('index.html', username=username, password=password)


@app.route("/register", methods=["GET", "POST"])
def register():
    # TODO: registration is not implemented yet.
    pass


@app.route("/post/<int:id>")
def view_post(id):
    # NOTE(review): function body continues beyond this chunk.
import time

from scheduler import Scheduler
from sensors import CPUTemp, AirQ, TempHum
from db import DatabaseHandler

# Module-level sensor and database instances.
# NOTE(review): read_sensors() below builds its own identical instances that
# shadow these — presumably the module-level ones serve other code outside
# this chunk; verify.
cpu_mon = CPUTemp(f"/sys/class/thermal/thermal_zone0/temp")
airq = AirQ(1)
temphum = TempHum(2)
db_handler = DatabaseHandler("test01")


def read_sensors() -> None:
    # Collect one reading from each sensor and persist it to "test01".
    cpu_mon = CPUTemp(f"/sys/class/thermal/thermal_zone0/temp")
    airq = AirQ(1)
    temphum = TempHum(2)
    db_handler = DatabaseHandler("test01")

    def db_write_helper() -> None:
        # CPU temperature: a (timestamp, value) tuple.
        cpu_reading = cpu_mon.get_reading()
        db_handler.write_to_db("CPU-Temp", cpu_reading)
        #print(f"Wrote CPU Temperature: {cpu_reading[1]} at {cpu_reading[0]}")
        aq_reading = airq.get_reading()
        db_handler.write_to_db("AirQ", aq_reading)
        #print(f"Wrote Air Quality: {aq_reading[1]} at {aq_reading[0]}")
        # Temperature and humidity arrive as one (timestamp, (temp, hum)).
        rtemp_hum = temphum.get_reading("temperature")
        rtemp = (rtemp_hum[0], rtemp_hum[1][0])
        db_handler.write_to_db("Room_Temp", rtemp)
        #print(f"Wrote Room Temperature: {rtemp[1]} at {rtemp[0]}")
        rhum = (rtemp_hum[0], rtemp_hum[1][1])
        db_handler.write_to_db("Room_Hum", rhum)
        #print(f"Wrote Room Humidity: {rhum[1]} at {rhum[0]}")
        # NOTE(review): chunk ends here — read_sensors presumably goes on to
        # schedule db_write_helper beyond this view; verify.
    # Path of the plain-text file that stores the last processed event id.
    # NOTE(review): these first members belong to a class whose header is
    # outside this chunk (instantiated below as Settings()).
    FILE_NAME = os.path.join(root_dir, "settings.txt")

    def get_last_processed_event_id(self):
        """Read the last processed event id back from the settings file."""
        f = open(self.FILE_NAME, "r")
        last_id = f.read()
        f.close()
        return int(last_id)

    def put_last_processed_event_id(self, last_id):
        """Overwrite the settings file with *last_id*."""
        f = open(self.FILE_NAME, "w")
        f.write(str(last_id))
        f.close()


DEFAULT_ARTICLES_COUNT = 10
db_handler = DatabaseHandler()
settings = Settings()
# Merge completion percentage (0-100), updated by the merge job.
MERGE_PROGRESS = 0


@app.route('/')
def redirect_to_events():
    """Send the bare root URL to the events listing."""
    return redirect(url_for('events'))


def get_events_group(id):
    # Resolve the event-set this event belongs to, then gather its events.
    # NOTE(review): `id` is rebound from an event id to a set id on the
    # first line — confusing but preserved as-is. The chunk ends
    # mid-function; the loop body continues beyond this view.
    id = db_handler.get_event_set_for_event_by_id(id)
    events_ids = db_handler.get_events_set_by_id(id)
    events = [get_extended_event(id)]
    for event_id in events_ids: