def open_spider(self, spider):
    self.station = StationsData()
    # ambience database session
    self.ambienceSession = sessionmaker(bind=db_connect(AMBIENCE_DATABASE))
    # scraper database session
    self.scraperSession = sessionmaker(bind=db_connect(SCRAPER_DATABASE))
    self.amb_session = self.ambienceSession()
    self.scraper_session = self.scraperSession()
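For context, a Scrapy item pipeline like this one only runs if it is enabled in the project's settings.py. A minimal sketch, assuming a hypothetical module path and class name (ambience.pipelines.StationPipeline is a placeholder, not the project's real name):

# settings.py (sketch; the dotted path is a placeholder)
ITEM_PIPELINES = {
    'ambience.pipelines.StationPipeline': 300,  # lower number = runs earlier
}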
def __init__(self):
    # ambiencedata database session
    self.ambiencedataSession = sessionmaker(bind=db_connect())
    # scraper database session
    SCRAPER_DATABASE = {
        'drivername': 'postgres',
        'host': 'localhost',
        'port': '5432',
        'username': '******',
        'password': '******',
        'database': 'ambiencedata'
    }
    self.scraperSession = sessionmaker(bind=db_connect(SCRAPER_DATABASE))
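Both constructors hand a settings dict to a db_connect() helper that is defined elsewhere in the project. A minimal sketch of that helper, assuming it follows the usual SQLAlchemy pattern of building a URL from the dict and returning an engine (the default argument and names are assumptions):

from sqlalchemy import create_engine
from sqlalchemy.engine.url import URL

def db_connect(settings=AMBIENCE_DATABASE):
    # Assumed helper: turn a settings dict shaped like SCRAPER_DATABASE above
    # into an engine. On SQLAlchemy < 1.4 this is typically written as
    # create_engine(URL(**settings)); 1.4+ uses URL.create().
    return create_engine(URL.create(**settings))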
def __init__(self):
    engine = db_connect()
    create_table(engine)
    self.Session = sessionmaker(bind=engine)
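create_table() is also defined elsewhere; assuming the Station/StationsData models are built on SQLAlchemy's declarative Base, it is presumably a thin wrapper around metadata.create_all(), sketched here:

from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()  # assumed: the project's models inherit from this

def create_table(engine):
    # Create any tables declared on Base that do not exist yet; no-op otherwise.
    Base.metadata.create_all(engine)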
def __init__(self):
    engine = db_connect()
    self.Session = sessionmaker(bind=engine)
    try:
        session.add(station_data)
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
    return item


if __name__ == "__main__":
    ambienceSession = sessionmaker(bind=db_connect(AMBIENCE_DATABASE))
    amb_session = ambienceSession()
    station = amb_session.query(Station).all()
    counter = 4808  # last id 3811
    for el in station[4808:]:
        # print("1", el.source.encode())
        # print("2", el.source_id.encode("utf-8"))
        # print("3", el.id)
        sql = u"UPDATE scrapper_station_data SET source = '{source}', source_id = '{source_id}' WHERE st_id = {st_id}".format(
            source=el.source, source_id=el.source_id, st_id=el.id)
        # print(sql)
        amb_session.execute(sql)
        amb_session.commit()
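The UPDATE above interpolates el.source and el.source_id straight into the SQL string, which breaks on values containing quotes and is open to injection. A sketch of the same update with bound parameters via sqlalchemy.text(), committing once after the loop instead of per row:

from sqlalchemy import text

update_stmt = text(
    "UPDATE scrapper_station_data "
    "SET source = :source, source_id = :source_id "
    "WHERE st_id = :st_id"
)
for el in station[4808:]:
    # The driver handles quoting/escaping of the bound values.
    amb_session.execute(
        update_stmt,
        {"source": el.source, "source_id": el.source_id, "st_id": el.id},
    )
amb_session.commit()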