def __commit(self):
    """Persist the release and all associated collections in one transaction.

    The release row itself is only added when it does not already exist.
    On any SQLAlchemy error the transaction is rolled back, the release
    is removed via ``self.remove_release`` and the process exits.
    """
    try:
        # Sanity check: motif handles are expected to be unique.
        handles = [i.handle for i in self.motifs]
        if len(handles) != len(set(handles)):
            # FIX: was ``pdb.set_trace()`` — a debugger breakpoint left in
            # production code. Log the anomaly instead; the commit proceeds
            # as it did after the original breakpoint was resumed.
            logging.warning('Duplicate motif handles detected before commit')
        # NOTE(review): assuming only the release insert is conditional on
        # the existence check — confirm against the original formatting.
        r = session.query(NR_release).filter(NR_release.id == self.release.id).first()
        if not r:
            session.add(self.release)
        session.add_all(self.motifs)
        session.add_all(self.loops)
        session.add_all(self.history)
        session.add_all(self.intersection)
        session.add_all(self.release_diff)
        session.commit()
        logging.info("Successful update")
    except sqlalchemy.exc.SQLAlchemyError as e:
        # FIX: modernized the legacy ``except X, e`` comma syntax
        # (removed in Python 3; ``as`` works on Python 2.6+ too).
        logging.error("Update failed. SQLAlchemy error. Rolling back.")
        logging.error(str(e))
        session.rollback()
        self.remove_release(self.release.id)
        sys.exit()
def fetchGamelogs(self, player):
    """Replace all stored gamelogs for *player* with freshly scraped data.

    Deletes the player's existing gamelogs, scrapes the current logs via
    ``Scraper.getGamelogs`` and commits one ``Gamelog`` row per game.
    """
    # Delete all gamelogs for player
    for gamelog in player.gamelogs:
        session.delete(gamelog)

    scraper = Scraper()
    logs = scraper.getGamelogs(player.id)

    gamelogs = []
    for log in logs:
        gamelog = Gamelog()
        gamelog.player = player
        gamelog.game_id = log['game_id']
        gamelog.MIN = log['MIN']
        gamelog.FGM = log['FGM']
        gamelog.FGA = log['FGA']
        gamelog.FG_PCT = log['FG_PCT']
        gamelog.FG3M = log['FG3M']
        gamelog.FG3A = log['FG3A']
        gamelog.FG3_PCT = log['FG3_PCT']
        # BUG FIX: the free-throw stats were being assigned to the
        # three-point fields (FG3M/FG3A/FG3_PCT), silently overwriting
        # them and never storing FT data. Assumes the Gamelog model
        # declares FTM/FTA/FT_PCT columns — TODO confirm against model.
        gamelog.FTM = log['FTM']
        gamelog.FTA = log['FTA']
        gamelog.FT_PCT = log['FT_PCT']
        gamelog.OREB = log['OREB']
        gamelog.DREB = log['DREB']
        gamelog.REB = log['REB']
        gamelog.AST = log['AST']
        gamelog.STL = log['STL']
        gamelog.BLK = log['BLK']
        gamelog.TOV = log['TOV']
        gamelog.PTS = log['PTS']
        gamelog.DK = self.calcDK(log)
        gamelogs.append(gamelog)

    session.add_all(gamelogs)
    session.commit()
def fetchPlayers(self):
    """Scrape the list of player ids and persist one Player row per entry."""
    scraper = Scraper()
    # Build every Player in one pass, skipping id 299 (excluded by the
    # original implementation; the reason is not recorded here).
    instances = [
        Player(name=entry['name'], id=entry['id'])
        for entry in scraper.getPlayerIds()
        if entry['id'] != 299
    ]
    session.add_all(instances)
    session.commit()
def __release_diff_commit(self):
    """Commit the accumulated release differences and purge NR_handle rows.

    Rolls back and exits the process on any SQLAlchemy error.
    """
    try:
        session.add_all(self.release_diff)
        # Bulk-delete every NR_handle row as part of the same transaction.
        session.query(NR_handle).delete()
        session.commit()
        logging.info("Successful update")
    except sqlalchemy.exc.SQLAlchemyError as e:
        # FIX: modernized the legacy ``except X, e`` comma syntax
        # (removed in Python 3; ``as`` works on Python 2.6+ too).
        logging.error("Update failed. SQLAlchemy error. Rolling back.")
        logging.error(str(e))
        session.rollback()
        sys.exit()
def __import_qa_from_csv(self, ifn, release_id):
    """Reads the csv file, imports all distances, deletes the file
    when done to avoid stale data and free up disk space.

    :param ifn: path to the csv file to import
    :param release_id: release id stored on every LoopQA row
    """
    logging.info('Importing qa')
    QA = []
    # FIX: use a context manager so the handle is closed before
    # os.remove() — the original never closed the file, which leaks the
    # handle and makes the removal fail on platforms that forbid
    # deleting open files (e.g. Windows).
    with open(ifn, 'rb') as f:
        reader = csv.reader(f, delimiter=',', quotechar='"')
        for row in reader:  # unused index dropped from enumerate()
            # Empty strings mean "no value" — store NULL instead.
            modres = row[2]
            if modres == '':
                modres = None
            compl = row[4]
            if compl == '':
                compl = None
            QA.append(LoopQA(id=row[0],
                             status=row[1],
                             modifications=modres,
                             nt_signature=row[3],
                             complementary=compl,
                             release_id=release_id))
    # Remove the source file before committing, matching the original
    # order of operations.
    os.remove(ifn)
    session.add_all(QA)
    session.commit()
    logging.info('%s loops checked and imported' % len(QA))
from models import Publisher, Book, Shop, Stock, Sale, session

# Seed script: populates the bookstore schema with demo publishers,
# books, shops and stock rows, committing after each table.
# NOTE(review): this chunk is truncated — the Stock list below is never
# closed and no commit follows; the remainder lies outside this view.
if __name__ == '__main__':
    # Publishers (ids assigned explicitly so Book rows can reference them).
    session.add_all([
        Publisher(id=1, name='Поставщик №1'),
        Publisher(id=2, name='Поставщик №2'),
        Publisher(id=3, name='Поставщик №3')
    ])
    session.commit()
    # Books, keyed to the publishers above via id_publisher.
    session.add_all([
        Book(id=1, title='Книга 1 поставщика №1', id_publisher=1),
        Book(id=2, title='Книга 2 поставщика №1', id_publisher=1),
        Book(id=3, title='Книга 3 поставщика №1', id_publisher=1),
        Book(id=4, title='Книга 1 поставщика №2', id_publisher=2),
        Book(id=5, title='Книга 2 поставщика №2', id_publisher=2),
        Book(id=6, title='Книга 1 поставщика №3', id_publisher=3)
    ])
    session.commit()
    # Shops.
    session.add_all([
        Shop(id=1, name='Магазин №1'),
        Shop(id=2, name='Магазин №2')
    ])
    session.commit()
    # Stock counts per (book, shop) pair.
    session.add_all([
        Stock(id=1, count=3, id_book=1, id_shop=1),
        Stock(id=2, count=2, id_book=1, id_shop=2),
        Stock(id=3, count=5, id_book=2, id_shop=2),
        Stock(id=4, count=0, id_book=3, id_shop=1),
        Stock(id=5, count=2, id_book=4, id_shop=1),
        Stock(id=6, count=3, id_book=4, id_shop=2),
        Stock(id=7, count=4, id_book=6, id_shop=1)
def _log_sync(task_id, fact_ids):
    """Add one synchronization-log entry per fact id for *task_id*."""
    batch = []
    for fid in fact_ids:
        batch.append(SyncLog(fact_id=fid, task_id=task_id))
    session.add_all(batch)
    session.commit()
def __commit(self):
    """Commit the release together with every associated collection.

    On any SQLAlchemy error: roll back, remove the release via
    ``self.remove_release`` and exit the process.
    """
    try:
        session.add(self.release)
        session.add_all(self.motifs)
        session.add_all(self.loops)
        session.add_all(self.history)
        session.add_all(self.intersection)
        session.add_all(self.release_diff)
        session.add_all(self.loop_order)
        session.add_all(self.loop_positions)
        session.add_all(self.loop_discrepancy)
        session.commit()
        logging.info('Successful update')
    except sqlalchemy.exc.SQLAlchemyError as e:
        # FIX: modernized the legacy ``except X, e`` comma syntax
        # (removed in Python 3; ``as`` works on Python 2.6+ too).
        logging.error('Update failed. Rolling back.')
        logging.error(str(e))
        session.rollback()
        self.remove_release(self.release.id)
        sys.exit()
def populate_data(session): sports = [ 'Soccer', 'Basketball', 'Baseball', 'Frisbee', 'Snowboarding', 'Rock Climbing', 'Foosball', 'Skating', 'Hockey' ] sports = [Sport(title=t) for t in sports] session.add_all(sports) session.commit() def find_object(name, objects): try: return next(o for o in objects if o.title == name) except: print 'Error finding id of object with name', name raise gears = [ { 'title': 'Basketball Net', 'description': ( "A QUALITY SLAM DUNK If you're not" "impressed with the quality of our basketball nets," "we'll refund your money-no questions asked" ), 'sport_name': 'Basketball', }, { 'title': 'Climbing Harness', 'description': ( "Traditional buckle, harness construction distributes" " pressure to keep you comfortable while climbing;waist" " belt and leg loop are lineed with breathable mesh to" " ensure comfort in warm temperatures." ), 'sport_name': 'Rock Climbing', }, { 'title': 'Foosball Tabletop', 'description': 'High Quality wood build withstands wear', 'sport_name': 'Foosball', }, { 'title': 'Mondor Knee-High Skating Socks 2 Pairs', 'description': '85% nylon microfiber, 15% lycra (r) spandex', 'sport_name': 'Skating', }, { 'title': 'Nike Swoosh Headband', 'description': ( 'Nike Swoosh Headband, ' 'Embroidered Swoosh logo, ' 'Machine washable, ' 'Easy care' ), 'sport_name': 'Basketball', }, { 'title': 'Stick', 'description': ( 'Includes official size 65mm low density street' ' hockey ball' ), 'sport_name': 'Hockey', }, { 'title': 'Goggles', 'description': ( "Swimming Goggles, PHELRENA Professional Swim" " Goggles Anti Fog UV Protection No Leaking" " for Adult Men Women Kids Swim" ), 'sport_name': 'Snowboarding', }, { 'title': 'Two shinguards', 'description': ( "2 Pair Youth Soccer Shin Guards, Kids Soccer" " Child Calf Protective Gear Soccer Equipment" " for 5-12 Years Old Boys Girls Children Teenagers" ), 'sport_name': 'Soccer', }, { 'title': 'Shinguards', 'description': 'Hard shell with foam backing for added protection', 'sport_name': 
'Soccer', }, { 'title': 'Frisbee', 'description': ( "Discraft 175 gram Ultra-Star Sportdisc-Nite-Glo," " colors may vary'" ), 'sport_name': 'Frisbee', }, { 'title': 'Bat', 'description': ( "PowerNet Sweet Spot Training Bat and 3.2\"" " Progressive Weighted Ball (9 Pack) PRO Bundle" " for Baseball Softball" ), 'sport_name': 'Baseball', }, { 'title': 'Jersey', 'description': "adidas Men's Soccer Estro Jersey'", 'sport_name': 'Soccer', }, { 'title': 'Soccer Cleats', 'description': ( "Dream Pairs 151028-151030 Men's Sport" " Flexible Athletic Free Running Light Weight" " Indoor/Outdoor Lace Up Soccer Shoes" ), 'sport_name': 'Soccer', } ] # processing the relationship Sport-Gears for gear in gears: obj = find_object(gear['sport_name'], sports) del gear['sport_name'] gear['sport'] = obj gear['sport_id'] = obj.id gears = [Gear(**g) for g in gears] session.add_all(gears) session.commit() # users user_data = [ { 'name': 'Dale', 'email': '*****@*****.**', 'password': '******' }, { 'name': 'Jenny', 'email': '*****@*****.**', 'password': '******' }, { 'name': 'Trong Nguyen', 'email': '*****@*****.**', 'password': '******' } ] users = [User(name=d['name'], email=d['email']) for d in user_data] # for i, user in enumerate(users): # user.hash_password(user_data[i]['password']) session.add_all(users) session.commit() print 'Database was successfully populated!'
# NOTE(review): this chunk begins mid-function — the opening of the
# enclosing ``create_card`` definition lies outside this view, so the
# indentation of the tail below is reconstructed; confirm against the
# original file.
    except:
        card["title"] = a
    return card


def find_info(entry):
    """Parse one feed *entry* into a Card with an attached price Record.

    Reuses an existing Card row with the same title when one exists;
    otherwise a new Card is built (not yet added to the session here —
    the caller collects and adds the returned objects).
    """
    soup = BeautifulSoup(entry.description, "html.parser")
    # The card data lives in the second <td> of the entry description.
    secondtd = soup.findAll("td")[1]
    card = create_card(secondtd.strings)
    card_obj = session.query(Card).filter(
        Card.title == card["title"]).one_or_none()
    if card_obj is None:
        card_obj = Card(title=card["title"])
    record = Record(qty=card["qty"], price=card["price"])
    card_obj.records.append(record)
    print(f"{card_obj}")
    for record in card_obj.records:
        print(f"{record}")
    return card_obj


if __name__ == "__main__":
    # Fetch the feed, convert every entry to a Card, then persist them all.
    feed = get_feed()
    cards = []
    for entry in feed.entries:
        cards.append(find_info(entry))
    session.add_all(cards)
    session.commit()
from models import Base, User, StudyClass, UserClass, session, engine
from models import File, Code

# Demo/seed script for the study-class models.
# Recreate the schema from scratch: drop everything, then create all tables.
Base.metadata.drop_all(engine)
Base.metadata.create_all(engine)

# Seed a few users and classes.
users = [User('admin'), User('user1'), User('user2')]
classes = [StudyClass('python'), StudyClass('sessdsa')]
users[0].email = '*****@*****.**'
print(classes[0])
print(users[1].classes)
# Enroll user1 in the 'python' class via the plain relationship collection,
# and user2 via an explicit association object carrying a 'teacher' role.
users[1].classes.append(classes[0])
users[2].user_classes.append(UserClass(classes[0], users[2], 'teacher'))
session.add_all(users)
session.add_all(classes)
session.commit()

# Exercise the File API: create two files for a new user, fetch them back,
# then update the first one's description.
mimi = User.create('*****@*****.**', '123456')
mimi.name = 'Mimi'
file1 = File.create('image.png', '', mimi.id, b'12345', 'image/png')
file2 = File.create('image.png', '', mimi.id, b'67890', 'image/png')
print(File.get_file_by_id(file1.id))
print(File.get_files_by_owner(mimi.id))
file1.update(file1.name, 'This is an image', file1.owner, file1.content, file1.mime)
def commit_filings(self, urls):
    """Create one filing per url, persist them all, and return the list.

    FIX: ``map`` returns a one-shot iterator on Python 3, so the original
    handed callers an iterator that ``session.add_all`` had already
    exhausted. Materializing a list keeps the return value reusable and
    is behaviorally identical on Python 2.
    """
    filings = [self.create_filing(url) for url in urls]
    session.add_all(filings)
    session.commit()
    return filings