def lazy_setup():
    """!! CAREFUL !! Lazy setup to create the database connection and tables."""
    db.connect()
    create_tables()
def create_tables():
    db.connect()
    if not User.table_exists():
        User.create_table()
        # Seed a default admin account (credentials redacted in the source).
        admin = User(username='******', admin=True, active=True,
                     email="*****@*****.**")
        admin.set_password('admin')
        admin.save()
    if not Project.table_exists():
        Project.create_table()
    if not Addon.table_exists():
        Addon.create_table()
    if not Download.table_exists():
        Download.create_table()
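A minimal peewee model consistent with the calls above; the backend and any field not visible in the snippet (e.g. the password hash column and its hashing scheme) are assumptions, not the source's actual schema:

# Sketch only: fields beyond username/admin/active/email are assumed.
from peewee import Model, SqliteDatabase, CharField, BooleanField
import hashlib

db = SqliteDatabase('app.db')  # assumed backend

class User(Model):
    username = CharField(unique=True)
    email = CharField()
    admin = BooleanField(default=False)
    active = BooleanField(default=True)
    password_hash = CharField(null=True)  # filled in by set_password()

    class Meta:
        database = db

    def set_password(self, raw):
        # Placeholder hash; a real app would use werkzeug or passlib.
        self.password_hash = hashlib.sha256(raw.encode()).hexdigest()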
def set_repository_phab_hosted(self, details, callsign):
    connection = db.connect()
    with connection.cursor() as cursor:
        cursor.execute(repository_hosted_update_sql, (details, callsign))
    db.commit(connection)
    db.disconnect(connection)
def set_repository_policy(self, callsign, view_policy, edit_policy, push_policy):
    connection = db.connect()
    with connection.cursor() as cursor:
        cursor.execute(repository_policy_update_sql,
                       (view_policy, edit_policy, push_policy, callsign))
    db.commit(connection)
    db.disconnect(connection)
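Neither UPDATE statement appears in the snippet; judging only from the bound-parameter order in the two functions above, a plausible shape (an assumption about the Phabricator repository table, not the source's actual SQL) would be:

# Assumption: table and column names are inferred, not taken from the source.
repository_hosted_update_sql = (
    "UPDATE repository SET details = %s WHERE callsign = %s"
)
repository_policy_update_sql = (
    "UPDATE repository SET viewPolicy = %s, editPolicy = %s, "
    "pushPolicy = %s WHERE callsign = %s"
)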
def connectDB():
    cfg_path = os.path.join(os.path.dirname(__file__), os.pardir, DATABASE_CFG)
    config = parse_config_file(cfg_path)
    db = MysqlConnDb()
    if config and db.connect(**config):
        log.logger.debug("Connected to database successfully!")
        return db
    else:
        log.logger.error("Failed to connect to database!")
        return None
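A sketch of how a caller might use connectDB(), assuming MysqlConnDb exposes a close() method (not shown in the snippet):

db = connectDB()
if db is not None:
    try:
        pass  # run queries against db here
    finally:
        db.close()  # assumption: MysqlConnDb provides close()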
def main():
    logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s",
                        level=logging.INFO)
    db.connect()
    bot = init_detwtr_bot()
    for job in Job.select():
        logging.info("Processing job: {id}".format(id=job.id))
        if (datetime.datetime.now() - job.tweet.created_at) < datetime.timedelta(minutes=5):
            logging.info("Tweet is not old enough, wait a few more minutes")
            continue
        is_duplicate = False
        for tweet in Tweet.select().where(
                (Tweet.user == job.tweet.user) &
                (Tweet.created_at > job.tweet.created_at) &
                ~(Tweet.is_deleted) & ~(Tweet.is_withheld)):
            levdist = editdistance.eval(tweet.text, job.tweet.text)
            # Duplicate threshold: at most ~10% of the tweet's length (14/140)
            # in edits, but never less than 3 (assumes Python 3 true division).
            if (levdist <= max(3, int(math.ceil(14 / 140 * len(job.tweet.text))))
                    and job.tweet.media == tweet.media):
                is_duplicate = True
                logging.info("Duplicate found:\n{tweet_1}\n---\n{tweet_2}".format(
                    tweet_1=job.tweet.text, tweet_2=tweet.text))
                break
        if is_duplicate:
            logging.info("Tweet is very similar to other tweets, won't restore")
            job.delete_instance()
        else:
            logging.info("Found no similar tweets, going to restore! :3")
            text = job.tweet.text.replace("@", "&")
            try:
                if job.tweet.media:
                    media = io.BytesIO(job.tweet.media)
                    resp = bot.upload_media(media=media)
                    bot.update_status(status=text, media_ids=[resp["media_id"]])
                else:
                    bot.update_status(status=text)
                logging.info("Tweet restored, all is well...")
                job.delete_instance()
            except TwythonError as e:
                logging.error("TwythonError: {error}".format(error=repr(e)))
                if "Status is a duplicate" in e.msg:
                    job.delete_instance()
    db.close()
def create(self, rules):
    connection = db.connect()
    # Random 10-character alphanumeric suffix for the new policy PHID.
    new_phid = "PHID-PLCY-prophessor%s" % (
        ''.join([random.choice(string.ascii_letters + string.digits)
                 for _ in range(10)])
    )
    timestamp = int(time.time())
    with connection.cursor() as cursor:
        cursor.execute(insert_sql, (new_phid, json.dumps(rules), timestamp, timestamp))
    db.commit(connection)
    db.disconnect(connection)
    return new_phid
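insert_sql is defined elsewhere; judging by the four bound parameters, a plausible statement (an assumption, not the source's actual SQL) is:

# Assumed column names; only the parameter order is taken from create().
insert_sql = (
    "INSERT INTO policy (phid, rules, dateCreated, dateModified) "
    "VALUES (%s, %s, %s, %s)"
)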
def get_repository_phab_hosted(self, callsign):
    connection = db.connect()
    with connection.cursor() as cursor:
        cursor.execute(repository_hosted_select_sql, (callsign,))
        # Should only return one row.
        result = []
        for row in cursor:
            result = row['details']
    db.disconnect(connection)
    return result
def before_request():
    db.connect()
    g.redis = redis.StrictRedis()
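A matching teardown hook, sketched under the assumption that db is a peewee-style database object (which does provide is_closed() and close()):

def teardown_request(exc):
    # Close the per-request connection opened in before_request().
    if not db.is_closed():
        db.close()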
logging.info("Tweet not found! :(") event_db = Event(event="withheld", user=User.get_or_create(user_id=str(data["status_withheld"]["user_id"]))[0], tweet=instance) event_db.save() if instance: # mark this tweet as deleted instance.is_withheld = True instance.save() # add tweet to job queue jobs_db = Job(tweet=instance) try: jobs_db.save() except IntegrityError: logging.error("Tweet is already marked for restoration") def on_error(self, status_code, data): logging.error("Error while processing stream: {}".format(status_code)) if __name__ == '__main__': logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s", level=logging.INFO) db.connect() db.create_tables([Tweet, User, Event, Job, ], safe=True) stream = init_detwtr_stream() stream.user() db.close()
from database import db
from dto.visit import Visit
import datetime

con = db.connect()

# visit = Visit()
# visit.chat_id = 382350841
# visit.category_id = 5
# visit.visit_date = datetime.datetime.today()
# db.save_visit(con, visit)

# count = db.get_visits_count_last_week(con)
# print(count)

visits = db.get_visits_count_by_categories(con)
categories = db.select_categories_names_by_ids(con, tuple(visits.keys()))
for visit in visits.keys():
    print(categories[visit] + ' ' + str(visits[visit]))

# for key in visits.keys():
#     print(str(key) + ' ' + str(visits[key]))

# db.deleteCategory(con, 3)
# row = db.get_current_state(con, 382350841)
# print(row[0])

# categories = db.select_all_categories(con)
# for category in categories:
#     print(category.name)
from database import db, User, Role, UserRoles, Contact, Tweeb

db.connect()
db.create_tables([User, Role, UserRoles, Contact, Tweeb], safe=True)
import config
import database.db as db
import database.postDAO as postDAO
import database.companyDAO as companyDAO

# Make a connection and grab a cursor
db = db.connect(config.DATABASE['user'], config.DATABASE['password'])
selectCursor = db.cursor()

# Get one company by its id
company = companyDAO.getCompany(400, selectCursor)
print(company['twitter_handle'])

# Get all companies in an array of dicts
companies = companyDAO.getCompanies(selectCursor)
for c in companies:
    print(c['name'])

# Get one post by id
post = postDAO.getPost(400, selectCursor)
print(post['content'])

# Get all posts in an array of dicts
posts = postDAO.getPosts(selectCursor)
for post in posts:
    print(post['likes_count'])

# Get all posts by one company (id 430 here)
posts = postDAO.getPostsByCompanyId(430, selectCursor)
for post in posts:
    print(post['created_time'])
from settings import CSV_CITY, CSV_SCHOOL, INSEE_CODE
from database import db
from apps.cities.models import City
from apps.schools.models import School
from apps.doctors.models import Doctor
import pandas as pd
import matplotlib.pyplot as plt
import apps.cities.city as ct
import apps.schools.school as sch
import apps.doctors.doctor as dc
import apps.data as dt
import argparse

if __name__ == '__main__':
    db.connect(reuse_if_open=True)
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "action",
        help="Choose an action to execute",
        nargs="?",
        choices=[
            "compute_city",
            "compute_school",
            "compute_doctor",
            "show_city_chart",
            "show_school_chart",
            "show_doctor_chart",
        ],
    )
    args = parser.parse_args()
    if args.action == "compute_city":
        # compute and store result in sql
def setUp(self):
    db.connect('sqlite:///:memory:')
    Base.metadata.create_all(db._engine)
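A matching tearDown for the fixture above, assuming the same db wrapper keeps its SQLAlchemy engine on the _engine attribute:

def tearDown(self):
    # Drop all tables so each test starts from a clean in-memory schema.
    Base.metadata.drop_all(db._engine)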