async def main():
    """Spawn IPN worker tasks and poll the database for new blocks.

    For every block past the last synced position, enqueue its IPN rows for
    the workers, then persist the new sync position so a restart resumes
    from there. Runs forever unless TESTING is set, in which case it
    returns after one sweep.
    """
    queue = asyncio.Queue()
    tasks = [asyncio.create_task(worker(queue)) for _ in range(IPN_WORKERS)]

    ldb = db.DataBase()
    ipn_sync_block = int(ldb.get_setting("ipn_sync_block"))

    while True:
        last_block = ldb.get_last_block()
        logger.warning("last_block: %s" % last_block)
        if last_block > ipn_sync_block:
            for block in range(ipn_sync_block + 1, last_block + 1):
                logger.warning("active block: %s" % block)
                for row in ldb.get_ipns(block) or []:
                    queue.put_nowait(row)
                # persist progress per block so a crash resumes correctly
                ldb.set_setting("ipn_sync_block", block)
                ipn_sync_block = block

        # waiting for next block usually each 10 minutes for BTC
        await asyncio.sleep(10)

        # code for unittests
        if TESTING:
            return None

        # Check for missing workers and respawn any that finished/crashed.
        # BUG FIX: the original called tasks.remove(task) while iterating
        # over `tasks`, which skips the element after each removal; rebuild
        # the list instead.
        tasks = [
            asyncio.create_task(worker(queue)) if task.done() else task
            for task in tasks
        ]
def _loggedin(self, user):
    """Finish an OAuth login: issue a fresh key for an existing user or
    create a new account (granting the 'super' role to configured emails),
    then store the key in the secure cookie.

    ``user`` is the provider's profile dict; at least 'email', 'name' and
    'picture' keys are read here.
    """
    email = user['email']
    # No email in the profile: nothing to identify the account by.
    if len(email) == 0:
        return
    database = db.DataBase()
    u = database.get_user_by_email(email)
    key = None
    if u:
        # Known user: rotate their session key.
        key = database.create_user_key()
        u.key = key
        database.update_user(u)
    else:
        # New user: default role 0, elevated to 'super' when the email is
        # listed in the super_emails option.
        role = 0
        supers = tornado.options.options.super_emails
        if supers and (email in supers):
            r = database.get_role_by_name('super')
            if r:
                role = r.id
        # NOTE(review): keys/sc_session are created but not used below —
        # presumably ScSession registers the user in the sc-memory as a
        # side effect; confirm before refactoring.
        with SctpClientInstance() as sctp_client:
            keys = Keynodes(sctp_client)
            sc_session = logic.ScSession(self, sctp_client, keys)
            key = database.add_user(name=user['name'], email=email,
                                    avatar=user['picture'], role=role)
    self.set_secure_cookie(self.cookie_user_key, key, 1)
def setUp(self):
    """Point the db layer at the test database and clear test block hashes."""
    db.DATABASE["database"] = "btc_ipn_test"
    self.ldb = db.DataBase()
    for height in (2, 5, 6, 8):
        self.ldb.delete_block_hash(height, "hash-test")
def init():
    """Load the JSON config next to this file and open the global database."""
    global database
    config_path = Path(__file__).parent / 'data.config'
    with config_path.open() as handle:
        config = json.loads(handle.read())
    database = db.DataBase(config['database'], config['username'],
                           config['password'])
def _loggedin(self, user):
    """Finish an OAuth login: rotate the key for an existing user or create
    a new account (granting 'super' to configured emails), then store the
    key in the secure cookie.
    """
    email = user['email']
    if len(email) == 0:
        return
    database = db.DataBase()
    u = database.get_user_by_email(email)
    key = None
    if u:
        # Known user: rotate their session key.
        key = database.create_user_key()
        u.key = key
        database.update_user(u)
    else:
        role = 0
        supers = tornado.options.options.super_emails
        if supers and (email in supers):
            r = database.get_role_by_name('super')
            if r:
                role = r.id
        key = database.add_user(name=user['name'], email=email, addr=1,
                                avatar=user['picture'], role=role)
        # FIX: removed the leftover debug `print key` (Python-2 syntax),
        # which leaked the user's session key to stdout.
    self.set_secure_cookie(self.cookie_user_key, key, 1)
def __init__(self):
    """GPS polling thread: set up device handle, buffers and storage path."""
    threading.Thread.__init__(self)
    self.gps = GPS.GPS()
    # NOTE(review): this binds the GPSData class object itself, not an
    # instance — confirm that is intended.
    self.gpsData = GPS.GPSData
    self.gpsAr = []
    self.dataBase = db.DataBase()
    self.f = "/app/storage/json/gps.json"
def __init__(self, sensor, testID):
    """Accelerometer worker thread bound to one sensor and test run."""
    threading.Thread.__init__(self)
    self.sensor = sensor
    self.testID = testID
    self.acc = []
    self.f = "/app/storage/json/acc.json"
    self.dataBase = db.DataBase()
def put(self):
    """Change a user's rights value.

    Responds 400 on invalid input or insufficient privilege, 500 when the
    user or their role cannot be loaded; otherwise echoes the new role as
    JSON.
    """
    target_id = int(self.get_argument(u'id', -1))
    if target_id < 0:
        raise tornado.web.HTTPError(400)

    new_rights = int(self.get_argument(u'v', -1))
    if not (0 <= new_rights <= 255):
        raise tornado.web.HTTPError(400)
    # Cannot grant more rights than the caller has.
    if self.current_user.rights < new_rights:
        raise tornado.web.HTTPError(400)

    database = db.DataBase()
    target = database.get_user_by_id(target_id)
    if not target:
        raise tornado.web.HTTPError(500)
    current_role = database.get_user_role(target)
    if not current_role:
        raise tornado.web.HTTPError(500)
    # Cannot demote a user who outranks the caller.
    if current_role.rights > self.current_user.rights:
        raise tornado.web.HTTPError(400)

    target.role = new_rights
    database.update_user(target)
    self.finish(json.dumps({
        'id': target.id,
        'role': {
            'value': target.role,
            'name': database.get_role_by_id(target.role).name
        }
    }))
def clean_last_block():
    """Delete the latest block's stored hash so it gets re-synced.

    FIX: removed the unused local `chain = ChainData()` (nothing in this
    function used it) and renamed `hash`, which shadowed the builtin.
    """
    ldb = db.DataBase()
    block = ldb.get_last_block()
    block_hash = ldb.get_block_hash(block)
    logger.warning("Cleaning block %s, hash %s" % (block, block_hash))
    ldb.delete_block_hash(block, block_hash)
def __init__(self, host, port, testID):
    """Bind a reusable TCP listening socket and tag the database with testID."""
    self.host = host
    self.port = port
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # SO_REUSEADDR lets the server restart without waiting for TIME_WAIT.
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind((self.host, self.port))
    self.sock = sock
    self.dataBase = db.DataBase()
    self.dataBase.setTestID(testID)
def get_current_user(self):
    """Resolve the logged-in user from the secure cookie, or None."""
    cookie_key = self.get_secure_cookie(self.cookie_user_key, None, 1)
    database = db.DataBase()
    record = database.get_user_by_key(cookie_key)
    return User(record, database) if record else None
def get(self):
    """Return all available rights as a JSON list of {value, name} dicts."""
    database = db.DataBase()
    rights = [{'value': r.rights, 'name': r.name}
              for r in database.list_rights()]
    self.finish(json.dumps(rights))
def get_received(address):
    """
    Main service function, that returns the total received balance.

    :returns a tuple with total received (confirmed, unconfirmed) balance
    from address, or None when the lookup fails (the error is logged).
    """
    try:
        return tuple(db.DataBase().get_total_received(address))
    except Exception as ex:
        # Best-effort: log the failure and fall through to an implicit None.
        logger.exception(ex)
def addData(self, data):
    """Insert data as a new enabled row, or update the row matching its url."""
    database = db.DataBase()
    # FIXME(security): the url is interpolated straight into the SQL string
    # — injectable if the url can be attacker-controlled.
    query = "SELECT * FROM %s WHERE url = '%s'" % (database.table,
                                                   data['url'])
    rows = database.query(query)
    if len(rows) == 0:
        data['enable'] = 1
        database.insert(data)
    else:
        database.edit(rows[0][0], data)
def getItems(self):
    """Fetch rows (only offline ones unless checkAll), formatted via fmtDbData."""
    database = db.DataBase()
    query = 'SELECT * FROM %s' % database.table
    if not self.checkAll:
        query += ' WHERE online = 0'
    return [database.fmtDbData(row) for row in database.query(query)]
def __init__(self, **kwargs):
    """Scraper: wire up Instaloader and the database from keyword config.

    Reads 'max_days', 'min_follower' and 'split_users' from kwargs.
    """
    ## logging
    s_logger.info(hostname + ': Scraper initialized')
    ## attributes
    self.max_days = kwargs['max_days']
    self.min_follower = kwargs['min_follower']
    ## instances
    self.L = instaloader.Instaloader()
    self.db = db.DataBase(c_logger, s_logger, t_logger,
                          kwargs['split_users'])
def __init__(self):
    """Load game servers, user MAC list and timing config from the DB."""
    self.db = db.DataBase()
    self.sdnctrls = set()
    self.gameserverList = self.db.getGameServer()
    self.userList = self.db.getUserMacList()
    cfg_row = self.db.getCfg()[0]
    self.onlineTime = int(cfg_row[0])
    self.sleepTime = int(cfg_row[1])
    self.logcycle = 2
    self.logtimer = self.logcycle
    self.usertimer = {}
def addData(self, data):
    """Insert a new row for data['url'] or update the existing one; rows
    that have failed 10+ times are deleted instead of updated."""
    database = db.DataBase()
    # FIXME(security): the url is interpolated straight into the SQL string
    # — injectable if the url can be attacker-controlled.
    rows = database.query(
        "SELECT * FROM %s WHERE url = '%s'" % (database.table, data['url']))
    if len(rows) == 0:
        database.insert(data)
        return
    row_id = rows[0][0]
    if data['failcount'] >= 10:
        database.delete(row_id)
    else:
        database.edit(row_id, data)
def open(self):
    """Register this websocket client and compute whether the user may edit."""
    if self not in clients:
        clients.append(self)
    cookie_key = self.get_secure_cookie(base.BaseHandler.cookie_user_key,
                                        None, 1)
    database = db.DataBase()
    canEdit = False
    user = database.get_user_by_key(cookie_key)
    if user:
        canEdit = base.User._canEdit(database.get_user_role(user).rights)
    # NOTE(review): canEdit is computed but never used here — the
    # hard-coded True below looks like it may have been meant to be
    # canEdit; confirm against SocketProxy's signature.
    self.proxy = SocketProxy(self.socket_write, True)
def addData(self, data):
    """Upsert data keyed by url; insert unconditionally when the query does
    not return a result list (treated as a failed/odd query result)."""
    database = db.DataBase()
    # FIXME(security): the url is interpolated straight into the SQL string
    # — injectable if the url can be attacker-controlled.
    result = database.query(
        "SELECT * FROM %s WHERE url = '%s'" % (database.table, data['url']))
    # FIX: use isinstance instead of `type(x) is list` (also removed the
    # stray debug print of the raw query result).
    if isinstance(result, list):
        if len(result) == 0:
            data['enable'] = 1
            database.insert(data)
        else:
            database.edit(result[0][0], data)
    else:
        database.insert(data)
def check_block(block):
    """
    Check if block hash remain the same.
    If hash change, delete and update all the related info.
    """
    chain = ChainData()
    ldb = db.DataBase()
    stored_hash = ldb.get_block_hash(block)  # renamed: don't shadow builtin
    if chain.get_blockhash(block) == stored_hash:
        return
    if block and stored_hash:
        logger.warning("Block %s change hash" % block)
        ldb.delete_block_hash(block, stored_hash)
    # Re-import the block's output balances under its current hash.
    balances, blockhash = chain.getblock_out_balances(block)
    for balance in balances:
        ldb.add_output(block, balance[0], balance[1], blockhash)
def main():
    """Gateway loop: index new changesets from the DB into Elasticsearch.

    Polls every 60 seconds; re-indexes only when the database generation
    counter has moved.
    """
    logging.config.fileConfig('logging.conf')

    parser = argparse.ArgumentParser(description='Elasticsearch gateway')
    parser.add_argument('-l', dest='log_level', default='INFO',
                        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR',
                                 'CRITICAL'],
                        help='Set the log level')
    parser.add_argument('--db', dest='db_url',
                        default='mongodb://localhost:27017/',
                        help='Set url for database')
    args = parser.parse_args()
    logging.getLogger('').setLevel(getattr(logging, args.log_level))

    db = database.DataBase(url=args.db_url)
    logger.debug('Connected to db: {}'.format(db))
    es = elasticsearch.Elasticsearch(hosts=os.environ.get('ES_ENDPOINT'))
    es_index = os.environ.get('ES_OSMTRACKER_INDEX')

    last_update = None
    generation = None
    while True:
        logger.debug('Refreshing cset index')
        if generation != db.generation:
            for cset in db.chgsets_find(state=None, after=last_update,
                                        sort=False):
                # _id is Mongo-internal and not JSON-serializable.
                del cset['_id']
                logging.info('Indexing cid {}'.format(cset['cid']))
                res = es.index(index=es_index, doc_type='cset',
                               id=cset['cid'], body=cset)
                logging.info('Done: {}'.format(res))
            generation = db.generation
        time.sleep(60)
def proseso(self, File):
    """Run the fuzzy colour system on File, render each palette colour as a
    label, and persist the hex list to the database."""
    paleta_de_colores = sistemaDifuso(File)
    print(paleta_de_colores)
    hex_colors = []
    for color in paleta_de_colores:
        # Channels are reversed before formatting — presumably BGR→RGB
        # (OpenCV-style ordering); confirm against sistemaDifuso.
        hex_value = "#%02x%02x%02x" % tuple(color[::-1])
        print(hex_value)
        hex_colors.append(hex_value)
        Label(self.master, text=hex_value, fg='white', pady=50,
              bg=hex_value, font=('Ubuntu', 12)).pack(side=RIGHT, padx=0,
                                                      pady=30)
    db_con = db.DataBase()
    print(hex_colors)
    db_con.create_color(str(hex_colors))
    db_con.select_all_colors()
def main():
    """Fetch Yelp restaurants, enrich each with a Google rating and
    website, and store everything in the database.

    Exits the program with a diagnostic message on an HTTP error from the
    Yelp query.
    """
    try:
        restaurants = query_api(DEFAULT_TERM, DEFAULT_LOCATION)
    except HTTPError as error:
        sys.exit(
            "Encountered HTTP error {0} on {1}:\n {2}\nAbort program.".format(
                error.code,
                error.url,
                error.read(),
            ))

    database = db.DataBase()
    database.crete_table()  # (sic: method name comes from the db module)
    for restaurant in restaurants:
        tags = [tag.get("title") for tag in restaurant.get("categories")]
        location = restaurant.get("location")
        # BUG FIX: the city expression was missing its {} in the f-string,
        # so the literal text 'restaurant.get("location").get("city")' was
        # appended to every Google query.
        google_rating = get_google_rating(
            f'{restaurant.get("name")} {location.get("address1")} '
            f'{location.get("city")}')
        website = get_website(restaurant.get("url"))
        print("Google:", google_rating)
        print("Website:", website)
        database.insert_restaurant(
            restaurant.get("name").replace("'", ""),
            telephone=restaurant.get("phone"),
            website=website,
            address=location.get("address1").replace("'", ""),
            city=location.get("city").replace("'", ""),
            zip_code=location.get("zip_code"),
            latitude=restaurant.get("coordinates").get("latitude"),
            longitude=restaurant.get("coordinates").get("longitude"),
            yelp_rating=restaurant.get("rating"),
            google_rating=google_rating,
            tags=tags,
        )
def main(product_id, date):
    """Import midpage data for one product on the given date.

    Creates DATA_DIR/<date>/ if needed, resolves the product config, and
    runs the import; failures are logged but do not propagate.
    """
    path = os.path.join(conf.DATA_DIR, date)
    if not os.path.exists(path):
        os.mkdir(path)
    path = os.path.join(path, "midpage.%s" % product_id)

    stat_db = db.DataBase()
    product = stat_db.get_midpage_product(product_id)
    if product is None:
        logging.info(u"产品id不存在:%s" % product_id)
        return

    name = product["name"]
    side = product["side"]
    source = product["source"].replace("${DATE}", date)

    logging.info(u"产品%s(%s)开始导入:%s" % (name, product_id, date))
    try:
        import_data(side, date, path, source, product_id)
    except Exception:
        # FIX: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt.
        logging.exception('')
        logging.info(u"产品%s(%s)导入失败" % (name, product_id))
    logging.info(u"产品%s(%s)导入结束" % (name, product_id))
async def worker(queue):
    """Consume one IPN job from queue and POST the balance to its callback.

    A job is an (address, max_confirms, url) tuple. The IPN is marked
    'done' when the remote endpoint echoes the payload back, 'fail'
    otherwise. Exceptions are logged and swallowed (best-effort worker).
    Returns the payload under TESTING for the unit tests.
    """
    try:
        # FIX: removed the pointless `ldb = None` / `if not ldb:` dance —
        # the handle was always freshly created.
        ldb = db.DataBase()
        address, max_confirms, url = await queue.get()
        confirmed, unconfirmed = ldb.get_total_received(
            address, confirms=max_confirms)
        data = {
            "address": address,
            "confirmed": confirmed,
            "unconfirmed": unconfirmed,
            "ipn_auth": IPN_AUTH
        }
        # NOTE(review): requests.post is blocking and stalls the event loop
        # — consider an async HTTP client or run_in_executor.
        resp = requests.post(url=url, data=data)
        if resp.ok and resp.json() == data:
            ldb.set_ipn_status(address, 'done')
        else:
            ldb.set_ipn_status(address, 'fail')
            logger.error("IPN confirmation failed! data: {}".format(data))
        queue.task_done()
        if TESTING:
            return data
    except Exception as ex:
        # NOTE(review): on failure queue.task_done() is never called, so a
        # queue.join() elsewhere would hang — confirm no caller joins.
        logger.exception(ex)
def __init__(self):
    """Create the shared Tools helper and the database handle."""
    self.DB = db.DataBase()
    self.T = tools.Tools()
def get(self):
    """Return one page of users as JSON; the page start comes from ?p=."""
    page_start = int(self.get_argument(u'p', 0))
    database = db.DataBase()
    page = database.paginate_users(page_start, self.ITEMS_PER_PAGE)
    self.finish(json.dumps(page))
def main():
    """Entry point: declare all server options, load config, initialise the
    database and logger, build the tornado routing table, and run the
    IOLoop until try_exit stops it."""
    # --- option declarations (overridable on the command line or in the
    # config file named by --cfg) ---------------------------------------
    tornado.options.define("static_path", default="../client/static",
                           help="path to static files directory", type=str)
    tornado.options.define("templates_path", default="../client/templates",
                           help="path to template files directory", type=str)
    tornado.options.define("sctp_port", default=55770,
                           help="port of sctp server", type=int)
    tornado.options.define("sctp_host", default="localhost",
                           help="host of sctp server", type=str)
    tornado.options.define("event_wait_timeout", default=10,
                           help="time to wait commands processing", type=int)
    # NOTE: option name "idtf_serach_limit" is misspelled but is part of
    # the public option interface — do not rename without migrating configs.
    tornado.options.define(
        "idtf_serach_limit", default=100,
        help="number of maximum results for searching by identifier",
        type=int)
    tornado.options.define("redis_host", default="localhost",
                           help="host of redis server", type=str)
    tornado.options.define("redis_port", default=6379,
                           help="port of redis server", type=int)
    tornado.options.define(
        "redis_db_idtf", default=0,
        help="number of redis database to store identifiers", type=int)
    tornado.options.define("redis_db_user", default=1,
                           help="number of redis database to store user info",
                           type=int)
    tornado.options.define("host", default="localhost",
                           help="host name", type=str)
    tornado.options.define("port", default=8000,
                           help="host port", type=int)
    tornado.options.define("google_client_id", default="",
                           help="client id for google auth", type=str)
    tornado.options.define("google_client_secret", default="",
                           help="client secret for google auth", type=str)
    tornado.options.define("apiai_subscription_key", default="",
                           help="subscription key for api.ai", type=str)
    tornado.options.define("apiai_client_access_token", default="",
                           help="client access token for api.ai", type=str)
    tornado.options.define("user_key_expire_time", default=600,
                           help="user key expire time in seconds", type=int)
    tornado.options.define(
        "super_emails", default="",
        help="email of site super administrator (maximum rights)", type=list)
    tornado.options.define("db_path", default="data.db",
                           help="path to database file", type=str)
    tornado.options.define("cfg", default="server.conf",
                           help="path to configuration file", type=str)

    # Command line first so --cfg itself can be overridden; then the config
    # file, if present, supplies the rest.
    tornado.options.parse_command_line()
    if os.path.exists(tornado.options.options.cfg):
        tornado.options.parse_config_file(tornado.options.options.cfg)

    # prepare database
    database = db.DataBase()
    database.init()

    # prepare logger
    logger_sc.init()

    # --- routing table ---------------------------------------------------
    rules = [
        (r"/", MainHandler),
        (r"/static/(.*)", NoCacheStaticHandler, {
            "path": tornado.options.options.static_path
        }),

        # api
        (r"/api/init/", api.Init),
        (r"/api/context/", api.ContextMenu),
        (r"/api/cmd/do/", api.CmdDo),
        (r"/api/cmd/text/", NaturalLanguageSearch),
        (r"/api/question/answer/translate/", api.QuestionAnswerTranslate),
        (r"/api/link/content/", api.LinkContent),
        (r"/api/link/format/", api.LinkFormat),
        (r"/api/languages/", api.Languages),
        (r"/api/languages/set/", api.LanguageSet),
        (r"/api/idtf/find/", api.IdtfFind),
        (r"/api/idtf/resolve/", api.IdtfResolve),
        (r"/api/addr/resolve/", api.AddrResolve),
        (r"/api/info/tooltip/", api.InfoTooltip),
        (r"/api/user/", api.User),

        (r"/auth/google$", auth.GoogleOAuth2LoginHandler),
        (r"/auth/logout$", auth.LogOut),

        (r"/admin$", admin.MainHandler),
        (r"/admin/users/get$", admin_users.UsersInfo),
        (r"/admin/users/set_rights$", admin_users.UserSetRights),
        (r"/admin/users/list_rights$", admin_users.UserListRights),

        (r"/sctp", ws.SocketHandler),
    ]

    application = tornado.web.Application(
        handlers=rules,
        cookie_secret=secret.get_secret(),
        login_url="/auth/google",
        template_path=tornado.options.options.templates_path,
        xsrf_cookies=False,
        gzip=True,
        google_oauth={
            "key": tornado.options.options.google_client_id,
            "secret": tornado.options.options.google_client_secret
        })

    application.listen(tornado.options.options.port)
    # try_exit polls every second so an external shutdown flag can stop
    # the IOLoop cleanly.
    tornado.ioloop.PeriodicCallback(try_exit, 1000).start()
    tornado.ioloop.IOLoop.instance().start()
QComboBox, QMessageBox, QCheckBox, QTableWidget, QTableWidgetItem, QWidget) import db import des import latex_converter import valid des.initial() des.create16keys() # des.encrypt("admin") if not os.path.exists("confirmations.db"): # first use of program # print('exists now') database = db.DataBase() database.sql_create_tables() des.encrypt("admin") else: database = db.DataBase() # print('existed') employees = database.get_all_pracownicy() def updateEmps(): employeesTemp = database.get_all_pracownicy() # print(employeesTemp) employees.clear() for i in range(0, len(employeesTemp)): employees.append(employeesTemp[i]) # print(employees)