def loadDBRicette(self):
    """Load every recipe listed in link_ricette.txt into the database.

    For each URL the recipe page is scraped via ``Ricetta``, then:
    the recipe row is inserted into ``ricette``, each ingredient is
    inserted into ``ingredienti`` (reusing an existing row when the
    composite UNIQUE key nome/link already exists) and linked through
    ``ingredienti_ricette``, and the preparation steps are bulk-inserted
    into ``preparazioni_ricette``.
    """
    cnx = DBConnector.connect()
    crs = cnx.cursor()
    with open("link_ricette.txt", "r") as fp:
        for line in fp:
            # Fix: iterating a file yields lines WITH the trailing newline;
            # the original passed it unstripped to Ricetta() and stored it.
            url = line.strip()
            if not url:  # robustness: skip blank lines
                continue
            print("Loading " + url + "\n")
            ric = Ricetta(url)

            # Insert the recipe itself and remember its generated id.
            add_ricetta = "INSERT INTO ricette(link,category,subcategory) VALUES(%s,%s,%s)"
            dati_ricetta = (url, ric.category, ric.subCategory)
            crs.execute(add_ricetta, dati_ricetta)
            ric_id = crs.lastrowid
            cnx.commit()

            # Insert the recipe's ingredients.
            if ric.ingredients is not None:
                for ing in ric.ingredients:
                    try:
                        # Reuse an existing ingredient row when possible.
                        # Fix: the original tested crs._rowcount, a private
                        # attribute that is unreliable (-1) on an unbuffered
                        # cursor before any row has been fetched; fetch and
                        # test the row instead.
                        crs.execute("select id from ingredienti where nome = %s and link = %s", ing)
                        row = crs.fetchone()
                        if row is not None:
                            ing_id = row[0]
                        else:
                            add_ingrediente = "INSERT INTO ingredienti(nome,link) VALUES(%s,%s)"
                            crs.execute(add_ingrediente, ing)
                            cnx.commit()
                            ing_id = crs.lastrowid
                        add_ingrediente_ricetta = "INSERT INTO ingredienti_ricette(id_ricetta,id_ingrediente) VALUES(%s,%s)"
                        crs.execute(add_ingrediente_ricetta, (ric_id, ing_id))
                        cnx.commit()
                    except mysql.connector.Error as err:
                        if err.errno == 1062:
                            # Violation of the composite UNIQUE key nome_link:
                            # the ingredient/link pair already exists, skip it.
                            continue
                        print(err.msg)
                        continue

            # Insert the recipe's preparation steps in one batch.
            if ric.prep:
                query = "INSERT INTO preparazioni_ricette(id_ricetta,step,descrizione_step) VALUES(%s,%s,%s)"
                # enumerate replaces the original error-prone manual step
                # counter over range(0, len-1) plus a trailing append.
                tup_lst = [(ric_id, step, descr) for step, descr in enumerate(ric.prep)]
                try:
                    crs.executemany(query, tup_lst)
                    # Fix: the original never committed after executemany, so
                    # with autocommit off these rows were lost at cnx.close().
                    cnx.commit()
                except mysql.connector.Error as err:
                    print(err.msg)
                    continue
    crs.close()  # fix: cursor was never closed
    cnx.close()
if __name__ == '__main__': args = get_args() if args.debug: log.setLevel(logging.DEBUG) else: log.setLevel(logging.INFO) fileHandler = logging.FileHandler('steamGrpService.log') fileHandler.setFormatter(logFormatter) log.addHandler(fileHandler) # preparing objects dbconnection = DBConnector() dbconnection.connect() parser = SiteParser() log.info('starting main loop') while True: items = [] with open(args.urljson, 'r') as f: log.debug('loading group urls from json...') urls_dict = json.load(f) for url_id in urls_dict: if urls_dict[url_id]['type'] in parser.canParse: items = items + parser.get_item_list( urls_dict[url_id]['url'], urls_dict[url_id]['type']) log.info('got {} announcements from rss feed'.format(items.__len__())) if items.__len__() > 0: